diff --git a/.circleci/config.yml b/.circleci/config.yml index efb2140b9c..8df94018cd 100644 --- a/.circleci/config.yml +++ b/.circleci/config.yml @@ -3,10 +3,10 @@ version: 2.1 parameters: cli_download_base_url: type: string - default: 'https://static.snyk.io/' + default: 'https://downloads.snyk.io/' fips_cli_download_base_url: type: string - default: 'https://static.snyk.io/fips/' + default: 'https://downloads.snyk.io/fips/' go_download_base_url: type: string default: 'https://storage.googleapis.com/golang/' @@ -35,8 +35,12 @@ orbs: executors: alpine: docker: - - image: alpine:3.17 + - image: alpine:3.20 resource_class: xlarge + alpine-arm64: + docker: + - image: alpine:3.20 + resource_class: arm.xlarge generic-ubuntu: docker: - image: ubuntu:latest @@ -56,6 +60,11 @@ executors: - image: snyklabs/cli-build-arm64:20240814-161347 working_directory: /mnt/ramdisk/snyk resource_class: arm.large + docker-arm64-xl: + docker: + - image: snyklabs/cli-build-arm64:20240814-161347 + working_directory: /mnt/ramdisk/snyk + resource_class: arm.xlarge linux-ubuntu-mantic-amd64: docker: - image: ubuntu:mantic @@ -101,6 +110,11 @@ executors: # https://circleci.com/docs/2.0/testing-ios/#supported-xcode-versions xcode: '14.3.1' resource_class: macos.m1.medium.gen1 + macos-arm64-large: + macos: + # https://circleci.com/docs/2.0/testing-ios/#supported-xcode-versions + xcode: '14.3.1' + resource_class: macos.m1.large.gen1 win-server2022-amd64: machine: image: windows-server-2022-gui:2024.01.1 @@ -233,7 +247,7 @@ commands: install-deps-windows-full-signing: steps: - install-deps-windows-signing - - install-deps-windows-full + - install-deps-windows-make install-deps-windows-make: steps: @@ -329,12 +343,21 @@ commands: - run: name: Installing Node.js + other test dependencies command: | - apk add --update nodejs npm bash maven git gradle python3 py3-pip elixir composer + arch=$(uname -m) + case "$arch" in + aarch64) arch="arm64";; + *) arch="amd64";; + esac + echo "using architecture: 
$arch" + apk add --update nodejs npm bash maven git python3 py3-pip elixir composer unzip pip3 install pipenv requests PyYAML setuptools==70.3.0 wget https://dot.net/v1/dotnet-install.sh -O dotnet-install.sh && chmod +x dotnet-install.sh && ./dotnet-install.sh - wget https://go.dev/dl/go<< pipeline.parameters.go_version >>.linux-amd64.tar.gz -O /tmp/go.tgz && \ + wget https://go.dev/dl/go<< pipeline.parameters.go_version >>.linux-$arch.tar.gz -O /tmp/go.tgz && \ tar -C /usr/local -xzvf /tmp/go.tgz && \ ln -s /usr/local/go/bin/go /usr/local/bin + wget https://services.gradle.org/distributions/gradle-8.10-bin.zip -O /tmp/gradle.zip && \ + unzip -d /usr/local /tmp/gradle.zip && \ + ln -s /usr/local/gradle-8.10/bin/gradle /usr/local/bin failed-release-notification: steps: @@ -451,7 +474,7 @@ workflows: go_arch: amd64 go_download_base_url: << pipeline.parameters.fips_go_download_base_url >> make_target: build clean-golang build-fips - executor: docker-amd64 + executor: docker-amd64-xl requires: - prepare-build @@ -462,7 +485,7 @@ workflows: go_arch: arm64 go_download_base_url: << pipeline.parameters.fips_go_download_base_url >> make_target: build clean-golang build-fips - executor: docker-arm64 + executor: docker-arm64-xl requires: - prepare-build @@ -472,7 +495,18 @@ workflows: go_os: linux go_arch: amd64 go_download_base_url: << pipeline.parameters.go_download_base_url >> - executor: docker-amd64 + executor: docker-amd64-xl + c_compiler: /usr/bin/musl-gcc + requires: + - prepare-build + + - build-artifact: + name: build alpine arm64 + go_target_os: alpine + go_os: linux + go_arch: arm64 + go_download_base_url: << pipeline.parameters.go_download_base_url >> + executor: docker-arm64-xl c_compiler: /usr/bin/musl-gcc requires: - prepare-build @@ -483,7 +517,7 @@ workflows: go_os: darwin go_arch: amd64 go_download_base_url: << pipeline.parameters.go_download_base_url >> - executor: macos-arm64 + executor: macos-arm64-large install_deps_extension: macos-build context: 
snyk-macos-signing requires: @@ -495,7 +529,7 @@ workflows: go_os: darwin go_arch: arm64 go_download_base_url: << pipeline.parameters.go_download_base_url >> - executor: macos-arm64 + executor: macos-arm64-large install_deps_extension: macos-build context: snyk-macos-signing requires: @@ -590,6 +624,23 @@ workflows: install_deps_extension: alpine-full dont_skip_tests: 0 + - acceptance-tests: + name: acceptance-tests alpine arm64 + context: + - nodejs-install + - team_hammerhead-cli + filters: + branches: + ignore: + - main + - '/release.*/' + requires: + - build alpine arm64 + executor: alpine-arm64 + test_snyk_command: ./binary-releases/snyk-alpine-arm64 + install_deps_extension: alpine-full + dont_skip_tests: 0 + - acceptance-tests: name: acceptance-tests macOS amd64 context: @@ -622,7 +673,7 @@ workflows: test_snyk_command: binary-releases\\snyk-win.exe install_deps_extension: windows-full dont_skip_tests: 0 - shards: 4 + shards: 8 pre_test_cmds: Import-Module $env:ChocolateyInstall\helpers\chocolateyProfile.psm1; RefreshEnv - sign: @@ -686,6 +737,7 @@ workflows: - sign macos arm64 - sign windows amd64 - build alpine amd64 + - build alpine arm64 - build linux amd64 - build linux arm64 - build fix & protect @@ -1127,6 +1179,8 @@ jobs: default: 3 executor: << parameters.executor >> parallelism: << parameters.shards >> + environment: + PIP_BREAK_SYSTEM_PACKAGES: 1 steps: - prepare-workspace - install-deps-<< parameters.install_deps_extension >> @@ -1371,6 +1425,9 @@ jobs: - run: name: Publish to GitHub command: ./release-scripts/upload-artifacts.sh github + - run: + name: Create Jira Release + command: ./release-scripts/create-jira-release.sh - failed-release-notification release-npm: diff --git a/.github/workflows/smoke-tests.yml b/.github/workflows/smoke-tests.yml index 81697b999e..9412b8ec9e 100644 --- a/.github/workflows/smoke-tests.yml +++ b/.github/workflows/smoke-tests.yml @@ -18,14 +18,14 @@ jobs: matrix: os: [ubuntu, macos, windows] snyk_install_method: 
[binary, npm, yarn, brew] - node_version: [16, 18, 20] + node_version: [18, 20] exclude: # Skip yarn for Windows, as it's a bit crazy to get it working in CI environment. Unless we see evidence we need it, I'd avoid it - snyk_install_method: yarn os: windows # For binary, use only the Node 18 - snyk_install_method: binary - node_version: 16 + node_version: 18 # No need to run brew tests on some Platforms - snyk_install_method: brew os: ubuntu @@ -40,14 +40,14 @@ jobs: snyk_cli_dl_file: snyk-macos - snyk_install_method: alpine-binary os: ubuntu - node_version: 16 + node_version: 18 snyk_cli_dl_file: snyk-alpine - snyk_install_method: npm-root-user os: ubuntu - node_version: 16 + node_version: 18 - snyk_install_method: docker-bundle os: macos - node_version: 16 + node_version: 18 snyk_cli_dl_file: snyk-for-docker-desktop-darwin-x64.tar.gz steps: diff --git a/.github/workflows/sync-cli-help-to-user-docs.yml b/.github/workflows/sync-cli-help-to-user-docs.yml index b0d6819577..8720b04080 100644 --- a/.github/workflows/sync-cli-help-to-user-docs.yml +++ b/.github/workflows/sync-cli-help-to-user-docs.yml @@ -3,7 +3,7 @@ name: Synchronize Help on: workflow_dispatch: schedule: - - cron: '0 12 * * 1-5' # Mon-Fri at 12 + - cron: '0 9 * * 1' # Mon at 9 push: branches: [chore/docs-action] diff --git a/.gitignore b/.gitignore index f4a146b0f5..da08548bb9 100644 --- a/.gitignore +++ b/.gitignore @@ -37,6 +37,8 @@ report.[0-9]*.[0-9]*.[0-9]*.[0-9]*.json /test/acceptance/workspaces/**/project/ /test/acceptance/workspaces/**/target/ test/acceptance/workspaces/**/.gradle +test/acceptance/workspaces/**/.build +test/acceptance/workspaces/**/Package.resolved test/**/.gradle .iac-data .dccache @@ -47,7 +49,7 @@ tap-output .tap # Jest coverage -test/fixtures/basic-swift/.build -test/fixtures/basic-swift/Package.resolved +test/fixtures/**/*/.build +test/fixtures/**/*/Package.resolved scripts/Brewfile.lock.json test/fixtures/**/go.sum \ No newline at end of file diff --git a/.gitleaksignore 
b/.gitleaksignore index c99df68d86..2d62f06f11 100644 --- a/.gitleaksignore +++ b/.gitleaksignore @@ -84,3 +84,4 @@ c2de35484dcad696a6ee32f2fa317d5cfaffc133:test/fixtures/code/sample-analyze-folde 25f37b4c609380452b0b96c3853b69e4dc29bb48:test/jest/unit/lib/iac/drift/fixtures/all.console:aws-access-token:98 ccd03cce97470452766ab397f2ba770dbb2e002e:test/jest/unit/lib/iac/drift/fixtures/all.console:aws-access-token:98 test/jest/acceptance/instrumentation.spec.ts:snyk-api-token:19 +1b65935bc7c69b1029d7c63808af211ae6030c98:test/fixtures/sast/shallow_sast_webgoat/JWTFinalEndpointTest.java:jwt:31 diff --git a/.snyk b/.snyk index 504111a482..e9fc405e6d 100644 --- a/.snyk +++ b/.snyk @@ -10,7 +10,7 @@ ignore: SNYK-JS-LODASHSET-1320032: - '*': reason: No upgrade path currently available - expires: 2024-09-13T13:14:22.120Z + expires: 2024-09-30T10:00:00.000Z created: 2023-09-13T13:14:22.120Z 'snyk:lic:npm:shescape:MPL-2.0': - '*': @@ -25,7 +25,7 @@ ignore: SNYK-JS-MICROMATCH-6838728: - '*': reason: Direct usage within Snyk CLI are not using vulnerable function - expires: 2024-08-13T04:12:20.523Z + expires: 2024-10-13T04:12:20.523Z created: 2024-05-14T04:12:20.531Z patch: {} exclude: diff --git a/.vscode/launch.json b/.vscode/launch.json index ed88310f46..24379cbb5c 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -29,6 +29,38 @@ "${workspaceRoot}/node_modules/.bin/jest", "--runInBand" ] + }, + { + "type": "node", + "request": "launch", + "name": "snyk test", + "console": "integratedTerminal", + "program": "node_modules/.bin/ts-node", + "args": ["./src/cli/index.ts", "test"] + }, + { + "type": "node", + "request": "launch", + "name": "snyk test --unmanaged", + "console": "integratedTerminal", + "program": "node_modules/.bin/ts-node", + "args": ["./src/cli/index.ts", "test", "--unmanaged"] + }, + { + "type": "node", + "request": "launch", + "name": "snyk container test", + "console": "integratedTerminal", + "program": "node_modules/.bin/ts-node", + "args": 
["./src/cli/index.ts", "container", "test", "snyk/snyk:linux"] + }, + { + "type": "node", + "request": "launch", + "name": "snyk iac test", + "console": "integratedTerminal", + "program": "node_modules/.bin/ts-node", + "args": ["./src/cli/index.ts", "iac", "test", "--unmanaged"] } ] } diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 9d8a9e41dd..f00ca71012 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -39,6 +39,8 @@ To build the project, run the following command in the root of the repository. ```sh make build +# or +make build-debug ``` Run the build binary like this. @@ -47,6 +49,30 @@ Run the build binary like this. ./binary-releases/snyk-macos --version ``` +## Debugging the go binary with VSCode + +1. Build the cli using `make build-debug` + +2. Save the `Installing` path from the build output + +3. Open your `.vscode/launch.json` file and add the following under `configurations` + +```json +{ + "name": "Attach to Go Process", + "type": "go", + "request": "attach", + "mode": "local", + "remotePath": "" +} +``` + +4. Add your break points + +5. Run the cli from your build path, you will see a prompt to attach a Debugger + +6. Run "Attach to Go Process" from under your debug tab + ## Running tests You can run tests using standard Jest commands. See: [Jest CLI docs](https://jestjs.io/docs/cli). 
diff --git a/Makefile b/Makefile index bbf9fad66c..7c1bba3d8b 100644 --- a/Makefile +++ b/Makefile @@ -119,6 +119,10 @@ $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-alpine: prepack | $(BINARY_RELEASES_FOLDER $(PKG) -t node$(PKG_NODE_VERSION)-alpine-x64 -o $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-alpine $(MAKE) $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-alpine.sha256 +$(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-alpine-arm64: prepack | $(BINARY_RELEASES_FOLDER_TS_CLI) + $(PKG) -t node$(PKG_NODE_VERSION)-alpine-arm64 -o $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-alpine-arm64 --no-bytecode + $(MAKE) $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-alpine-arm64.sha256 + $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-linux: prepack | $(BINARY_RELEASES_FOLDER_TS_CLI) $(PKG) -t node$(PKG_NODE_VERSION)-linux-x64 -o $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-linux $(MAKE) $(BINARY_RELEASES_FOLDER_TS_CLI)/snyk-linux.sha256 @@ -266,7 +270,7 @@ release-pre: @echo "-- Validating artifacts" @./release-scripts/validate-checksums.sh @echo "-- Validating upload permissions" - @./release-scripts/upload-artifacts.sh --dry-run preview latest github npm + @./release-scripts/upload-artifacts.sh --dry-run latest github npm @echo "-- Publishing to S3 /version" @./release-scripts/upload-artifacts.sh version diff --git a/binary-deployments.json b/binary-deployments.json index edd548e616..2febdc2abf 100644 --- a/binary-deployments.json +++ b/binary-deployments.json @@ -7,7 +7,8 @@ "amd64": "snyk-win.exe" }, "alpine": { - "amd64": "snyk-alpine" + "amd64": "snyk-alpine", + "arm64": "snyk-alpine-arm64" }, "linux": { "amd64": "snyk-linux", diff --git a/binary-releases/RELEASE_NOTES.md b/binary-releases/RELEASE_NOTES.md index dfc0d90f1a..53865ed744 100644 --- a/binary-releases/RELEASE_NOTES.md +++ b/binary-releases/RELEASE_NOTES.md @@ -1,7 +1,51 @@ -## [1.1293.1](https://github.com/snyk/snyk/compare/v1.1293.0...v1.1293.1) (2024-09-10) +## [1.1294.0](https://github.com/snyk/snyk/compare/v1.1293.0...v1.1294.0) (2024-10-11) The Snyk CLI is being 
deployed to different deployment channels, users can select the stability level according to their needs. For details please see [this documentation](https://docs.snyk.io/snyk-cli/releases-and-channels-for-the-snyk-cli) -### News +### Features + +* add CycloneDX 1.6 SBOM support ([1330fc2](https://github.com/snyk/snyk/commit/1330fc2442e48865ea2e1b27a94cf665ff4b0416)) +* add data transformation workflow [CLI-502] ([2cd3bfd](https://github.com/snyk/snyk/commit/2cd3bfd298b423ea632906cdd9b24ee5eac1c6d3)) +* automatic integration of language server 45d38517ca31d0dcbb30d35bc235b187f0c33156 ([e60dda0](https://github.com/snyk/snyk/commit/e60dda0c8c8d48a59a1260b221d5ac8bbc616093)) +* automatic integration of language server 56a46746f0be9d0ad20bc1c31e9aa8a66c8c31dc ([e5d8b68](https://github.com/snyk/snyk/commit/e5d8b68901b5934d7331a97ac56216532e1ece22)) +* automatic integration of language server 749398323e1918d99214e797aaf18adf0492d0a6 ([24cfd5a](https://github.com/snyk/snyk/commit/24cfd5ad30186089114307328d04bbfc8d11dc6f)) +* automatic integration of language server 871df13e7984636feb2c61570bff2d117828d8a3 ([507d402](https://github.com/snyk/snyk/commit/507d4023a3aaf78ee5340246946c43b9501b257e)) +* automatic integration of language server a8e770a8dcb46ad14861001a969c03694d7c2a30 ([0c22b2a](https://github.com/snyk/snyk/commit/0c22b2aa5329d3e5ca00ff23b7ec7472432b89ca)) +* automatic integration of language server e23b2e02a33de2f722a579a8fa10cccfa3d80d84 ([a3037bd](https://github.com/snyk/snyk/commit/a3037bd81389dbd7054b84ece9919ff87b9a44e9)) +* automatic integration of language server f340bd73b5146a0653b2126e7900c467c89e4398 ([0637bca](https://github.com/snyk/snyk/commit/0637bca3a1440e73b2ed874217db605732b6dee6)) +* automatic integration of language server f45a1a9e861a2f67a2aa6e624b755a411333298a ([771dce7](https://github.com/snyk/snyk/commit/771dce7ac5fc2daead08dba9187186bb81e4f687)) +* automatic integration of language server fa9fa4069fc2cd94b0b9aca67c27d2e7fd7ddacd 
([ac946d1](https://github.com/snyk/snyk/commit/ac946d1f521d1006bbec279377b190781675dbbb)) +* conditionally write gaf data to file ([7f11919](https://github.com/snyk/snyk/commit/7f11919360cd01e97ede0467dd0f064134807157)) +* **deployment:** Deploy alpine arm64 binaries ([9daace4](https://github.com/snyk/snyk/commit/9daace4aa1bdb5d5939d91a118709a5f78b64bb8)) +* drop policy property on global Snyk object ([fef0d69](https://github.com/snyk/snyk/commit/fef0d69e7e67923b1b3d704ef79f8df696ef310e)) +* enable cocoapods to send graphs for cli monitor ([ca56c69](https://github.com/snyk/snyk/commit/ca56c695e65f11b44b0c50f93b892a0e03aea97a)) +* pass allow analytics flag to snyk-iac-test [IAC-3017] ([b12d3ac](https://github.com/snyk/snyk/commit/b12d3acf99a318c3841977ba4a3277b32a8baa22)) + + +### Bug Fixes + +* add normalize help for deriving target files [CLI-448] ([82efb50](https://github.com/snyk/snyk/commit/82efb50280569b5a3f290fda347d18d6a67170ca)) +* **auth:** missing auth issue with oauth ([57ae95c](https://github.com/snyk/snyk/commit/57ae95cf5e3fc3d4c744a782feae2def17e70493)) +* check iacNewEngine FF and pass it to snyk-iac-test [IAC-3059] ([2051a6d](https://github.com/snyk/snyk/commit/2051a6d38071a304dbef97784cfeac20c7f56d09)) +* default limit to max vulnerable paths per vuln, add override option ([302d7ac](https://github.com/snyk/snyk/commit/302d7ac5a396d85cc4c424421ef5b7cfa5f32297)) +* **deployment:** upload sequence to s3 ([e8499b0](https://github.com/snyk/snyk/commit/e8499b041c4ca38b8bed86c704989df2c6408c32)) +* do not show test deps for Dverbose mvn with dependencyManagement ([67e0de9](https://github.com/snyk/snyk/commit/67e0de94c13622c390aff4a5b34bba4791272577)) +* **docs:** update contributing.md ([c2ff465](https://github.com/snyk/snyk/commit/c2ff465c34da80a2630099ca0c3653092d3ec3f9)) +* fixed support for pnpm alias packages ([d506de1](https://github.com/snyk/snyk/commit/d506de1203483cf627680a7ad7aa30b1479ed76c)) +* **iac:** upgrade iac custom rules ext to address 
vulns [IAC-3065] ([d6cc509](https://github.com/snyk/snyk/commit/d6cc509d919165efa7392b0f0ef532d8840f1207)) +* **iac:** upgrade snyk-iac-test to v0.55.1 [IAC-2940] ([0dadc90](https://github.com/snyk/snyk/commit/0dadc901087b97040243bb8a65b4844df9096a3d)) +* ignore false positive ([71215f6](https://github.com/snyk/snyk/commit/71215f68d35e07b17f93fbe22a93eea36ec2b925)) +* point snyk policy out urls to snyk.io ([28509a3](https://github.com/snyk/snyk/commit/28509a303e5d2b783799291e8db4afd159cd7533)) +* respect default detection depth of 4 ([45a74ea](https://github.com/snyk/snyk/commit/45a74eaf68404a2c046fe11d73682a8b5750368f)) +* restore cert file if it was externally removed ([ef1547f](https://github.com/snyk/snyk/commit/ef1547fde9fa0e53897bbb8c51fa1cf3b02d78b8)) +* scan non publishable projects on improved net ([a6c0e67](https://github.com/snyk/snyk/commit/a6c0e671937a662c0f3b4bfa4eae4c232511f7e8)) +* scan nuget with PublishSingleFile turned on ([2c74298](https://github.com/snyk/snyk/commit/2c74298094b627ec2d5df6b57f5aa49f67d4c132)) +* type errors in tests ([2e39187](https://github.com/snyk/snyk/commit/2e39187881daebaf0458fde772141ce9848c6762)) +* update tests to the current policy schema version ([35acaa9](https://github.com/snyk/snyk/commit/35acaa97fce3bd2627f8246d02ae3b79984fd2bd)) +* upgrade go-getter to 1.7.5 ([970de96](https://github.com/snyk/snyk/commit/970de96595a931f4362c9c95fe2ce901c4c63b55)) +* upgrade go-getter to 1.7.5 ([f730f9d](https://github.com/snyk/snyk/commit/f730f9d8893f13bf896e39d908d6b6d3662b3a42)) +* upgrade iac extension and snyk-iac-test ([9134c05](https://github.com/snyk/snyk/commit/9134c05d3f060daaa4294f47b7d2831bef894e07)) +* upgrade slack/webhook to 7.0.3 ([8ab4433](https://github.com/snyk/snyk/commit/8ab4433d2b9e037cd181270f62d3295a9c6b9086)) +* upgrade slack/webhook to 7.0.3 ([7e1a035](https://github.com/snyk/snyk/commit/7e1a03539f6e8c8a4b6fd500e9b5ac0c5449d079)) +* use runtimeInfo to derive the version for cliv1 path 
([652d1ba](https://github.com/snyk/snyk/commit/652d1ba0b4e59aa5e2bf16bf95f31898fc6068b0)) -- Starting with this version, Snyk cli binaries will be distributed via `downloads.snyk.io` instead of `static.snyk.io`. This includes intallation from `npm`. diff --git a/cliv2/Makefile b/cliv2/Makefile index aa23ff92d7..9fb8cb20d2 100644 --- a/cliv2/Makefile +++ b/cliv2/Makefile @@ -161,7 +161,7 @@ fips: .PHONY: debug debug: - $(eval LDFLAGS := -X github.com/snyk/snyk-ls/application/config.Development=true) + $(eval LDFLAGS := -X github.com/snyk/snyk-ls/application/config.Development=true -X main.buildType=debug) $(eval GCFLAGS := -gcflags="all=-N -l") @echo "$(LOG_PREFIX) DEBUG" diff --git a/cliv2/cmd/cliv2/debug.go b/cliv2/cmd/cliv2/debug.go index d0e6bb7a88..0262a501bc 100644 --- a/cliv2/cmd/cliv2/debug.go +++ b/cliv2/cmd/cliv2/debug.go @@ -12,10 +12,13 @@ import ( "github.com/snyk/go-application-framework/pkg/configuration" "github.com/snyk/go-application-framework/pkg/logging" + "github.com/snyk/go-application-framework/pkg/ui" debug_tools "github.com/snyk/cli/cliv2/internal/debug" ) +var buildType string = "" + func initDebugLogger(config configuration.Configuration) *zerolog.Logger { var consoleWriter = zerolog.ConsoleWriter{ Out: os.Stderr, @@ -42,3 +45,18 @@ func initDebugLogger(config configuration.Configuration) *zerolog.Logger { debugLogger.Log().Msgf("Using log level: %s", loglevel) return &debugLogger } + +func initDebugBuild() { + if strings.EqualFold(buildType, "debug") { + progress := ui.DefaultUi().NewProgressBar() + progress.SetTitle("Pausing execution to attach the debugger!") + waitTimeInSeconds := 10 + + for i := range waitTimeInSeconds { + value := float64(waitTimeInSeconds-i) / float64(waitTimeInSeconds) + progress.UpdateProgress(value) + time.Sleep(1 * time.Second) + } + progress.Clear() + } +} diff --git a/cliv2/cmd/cliv2/logheader.go b/cliv2/cmd/cliv2/logheader.go index f4dc067b4c..91033efb5e 100644 --- a/cliv2/cmd/cliv2/logheader.go +++ 
b/cliv2/cmd/cliv2/logheader.go @@ -106,7 +106,7 @@ func writeLogHeader(config configuration.Configuration, networkAccess networking fipsEnabled := getFipsStatus(config) - tablePrint("Version", cliv2.GetFullVersion()) + tablePrint("Version", cliv2.GetFullVersion()+" "+buildType) tablePrint("Platform", userAgent) tablePrint("API", config.GetString(configuration.API_URL)) tablePrint("Cache", config.GetString(configuration.CACHE_PATH)) diff --git a/cliv2/cmd/cliv2/main.go b/cliv2/cmd/cliv2/main.go index 97821485cd..84519f99d6 100644 --- a/cliv2/cmd/cliv2/main.go +++ b/cliv2/cmd/cliv2/main.go @@ -160,14 +160,20 @@ func runMainWorkflow(config configuration.Configuration, cmd *cobra.Command, arg func runWorkflowAndProcessData(engine workflow.Engine, logger *zerolog.Logger, name string) error { data, err := engine.Invoke(workflow.NewWorkflowIdentifier(name)) - if err == nil { - var output []workflow.Data - output, err = engine.InvokeWithInput(localworkflows.WORKFLOWID_OUTPUT_WORKFLOW, data) - if err == nil { - err = getErrorFromWorkFlowData(engine, output) - } - } else { + if err != nil { logger.Print("Failed to execute the command!", err) + return err + } + + output, err := engine.InvokeWithInput(localworkflows.WORKFLOWID_DATATRANSFORMATION, data) + if err != nil { + logger.Err(err).Msg(err.Error()) + return err + } + + output, err = engine.InvokeWithInput(localworkflows.WORKFLOWID_OUTPUT_WORKFLOW, output) + if err == nil { + err = getErrorFromWorkFlowData(engine, output) } return err } @@ -441,6 +447,8 @@ func displayError(err error, userInterface ui.UserInterface, config configuratio } func MainWithErrorCode() int { + initDebugBuild() + startTime := time.Now() var err error rInfo := runtimeinfo.New(runtimeinfo.WithName("snyk-cli"), runtimeinfo.WithVersion(cliv2.GetFullVersion())) @@ -520,11 +528,6 @@ func MainWithErrorCode() int { cliAnalytics.GetInstrumentation().SetStage(instrumentation.DetermineStage(cliAnalytics.IsCiEnvironment())) 
cliAnalytics.GetInstrumentation().SetStatus(analytics.Success) - if !globalConfiguration.GetBool(configuration.ANALYTICS_DISABLED) { - defer sendAnalytics(cliAnalytics, globalLogger) - } - defer sendInstrumentation(globalEngine, cliAnalytics.GetInstrumentation(), globalLogger) - setTimeout(globalConfiguration, func() { os.Exit(constants.SNYK_EXIT_CODE_EX_UNAVAILABLE) }) @@ -565,7 +568,13 @@ func MainWithErrorCode() int { cliAnalytics.GetInstrumentation().SetStatus(analytics.Failure) } + if !globalConfiguration.GetBool(configuration.ANALYTICS_DISABLED) { + sendAnalytics(cliAnalytics, globalLogger) + } + sendInstrumentation(globalEngine, cliAnalytics.GetInstrumentation(), globalLogger) + // cleanup resources in use + // WARNING: deferred actions will execute AFTER cleanup; only defer if not impacted by this _, err = globalEngine.Invoke(basic_workflows.WORKFLOWID_GLOBAL_CLEANUP) if err != nil { globalLogger.Printf("Failed to cleanup %v", err) diff --git a/cliv2/cmd/cliv2/main_test.go b/cliv2/cmd/cliv2/main_test.go index 498428a94e..cb9c68e0f5 100644 --- a/cliv2/cmd/cliv2/main_test.go +++ b/cliv2/cmd/cliv2/main_test.go @@ -4,8 +4,10 @@ import ( "encoding/json" "errors" "fmt" + "io" "os" "os/exec" + "strings" "testing" "time" @@ -196,6 +198,10 @@ func Test_runMainWorkflow_unknownargs(t *testing.T) { assert.NoError(t, err) } + // Register our data transformation workflow + err := localworkflows.InitDataTransformationWorkflow(globalEngine) + assert.NoError(t, err) + _ = globalEngine.Init() config := configuration.NewInMemory() @@ -213,7 +219,7 @@ func Test_runMainWorkflow_unknownargs(t *testing.T) { } // call method under test - err := runMainWorkflow(config, cmd, positionalArgs, rawArgs) + err = runMainWorkflow(config, cmd, positionalArgs, rawArgs) assert.Nil(t, err) actualInputDir := config.GetString(configuration.INPUT_DIRECTORY) @@ -385,6 +391,10 @@ func Test_runWorkflowAndProcessData(t *testing.T) { assert.Nil(t, err) assert.NotNil(t, entry) + // Register our data 
transformation workflow + err = localworkflows.InitDataTransformationWorkflow(globalEngine) + assert.NoError(t, err) + err = globalEngine.Init() assert.NoError(t, err) @@ -400,6 +410,83 @@ func Test_runWorkflowAndProcessData(t *testing.T) { assert.Equal(t, constants.SNYK_EXIT_CODE_VULNERABILITIES_FOUND, actualCode) } +func Test_runWorkflowAndProcessData_WithTransformation(t *testing.T) { + defer cleanup() + globalConfiguration = configuration.New() + globalConfiguration.Set(configuration.DEBUG, true) + globalConfiguration.Set(configuration.FF_TRANSFORMATION_WORKFLOW, true) + + globalEngine = workflow.NewWorkFlowEngine(globalConfiguration) + + testCmnd := "subcmd1" + workflowId1 := workflow.NewWorkflowIdentifier("output") + + outputFn := func(invocation workflow.InvocationContext, input []workflow.Data) ([]workflow.Data, error) { + assert.Len(t, input, 3, "not enough items received") + localFindingsFound := false + + for i := range input { + mimeType := input[i].GetContentType() + + if strings.HasPrefix(mimeType, content_type.LOCAL_FINDING_MODEL) { + localFindingsFound = true + } + } + + assert.True(t, localFindingsFound) + + return input, nil + } + + workflowConfig := workflow.ConfigurationOptionsFromFlagset(pflag.NewFlagSet("pla", pflag.ContinueOnError)) + + _, err := globalEngine.Register(workflowId1, workflowConfig, outputFn) + assert.NoError(t, err) + + // Register our data transformation workflow + err = localworkflows.InitDataTransformationWorkflow(globalEngine) + assert.NoError(t, err) + + // Invoke a custom command that returns input + fn := func(invocation workflow.InvocationContext, input []workflow.Data) ([]workflow.Data, error) { + typeId := workflow.NewTypeIdentifier(invocation.GetWorkflowIdentifier(), "workflowData") + testSummary := json_schemas.TestSummary{ + Results: []json_schemas.TestSummaryResult{ + { + Severity: "critical", + Total: 10, + Open: 10, + Ignored: 0, + }, + }, + Type: "sast", + } + + var d []byte + d, err = json.Marshal(testSummary) 
+ assert.NoError(t, err) + + testSummaryData := workflow.NewData(typeId, content_type.TEST_SUMMARY, d) + sarifData := workflow.NewData(typeId, content_type.SARIF_JSON, + loadJsonFile(t, "sarif.json")) + + return []workflow.Data{ + testSummaryData, + sarifData, + }, nil + } + wrkflowId := workflow.NewWorkflowIdentifier(testCmnd) + entry, err := globalEngine.Register(wrkflowId, workflowConfig, fn) + assert.NoError(t, err) + assert.NotNil(t, entry) + + err = globalEngine.Init() + assert.NoError(t, err) + + logger := zerolog.New(os.Stderr) + err = runWorkflowAndProcessData(globalEngine, &logger, testCmnd) +} + func Test_setTimeout(t *testing.T) { exitedCh := make(chan struct{}) fakeExit := func() { @@ -463,3 +550,17 @@ type wrErr struct{ wraps error } func (e *wrErr) Error() string { return "something went wrong" } func (e *wrErr) Unwrap() error { return e.wraps } + +func loadJsonFile(t *testing.T, filename string) []byte { + t.Helper() + + jsonFile, err := os.Open("./testdata/" + filename) + assert.NoError(t, err, "failed to load json") + defer func(jsonFile *os.File) { + jsonErr := jsonFile.Close() + assert.NoError(t, jsonErr) + }(jsonFile) + byteValue, err := io.ReadAll(jsonFile) + assert.NoError(t, err) + return byteValue +} diff --git a/cliv2/cmd/cliv2/testdata/sarif.json b/cliv2/cmd/cliv2/testdata/sarif.json new file mode 100644 index 0000000000..b7ea75c6c6 --- /dev/null +++ b/cliv2/cmd/cliv2/testdata/sarif.json @@ -0,0 +1,385 @@ +{ + "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json", + "version": "2.1.0", + "runs": [ + { + "tool": { + "driver": { + "name": "SnykCode", + "semanticVersion": "1.0.0", + "version": "1.0.0", + "rules": [ + { + "id": "javascript/NoSqli", + "name": "NoSqli", + "shortDescription": { + "text": "NoSQL Injection" + }, + "defaultConfiguration": { + "level": "error" + }, + "help": { + "markdown": "## Details\n\nIn an NoSQL injection attack, the user can submit an NoSQL query directly 
to the database, gaining access without providing appropriate credentials. Attackers can then view, export, modify, and delete confidential information; change passwords and other authentication information; and possibly gain access to other systems within the network. This is one of the most commonly exploited categories of vulnerability, but can largely be avoided through good coding practices.", + "text": "" + }, + "properties": { + "tags": [ + "javascript", + "NoSqli", + "Security", + "SourceServer", + "SourceHttpBody", + "Taint" + ], + "categories": ["Security"], + "exampleCommitFixes": [ + { + "commitURL": "https://gilhub.com/afuh/pinstagram/commit/776a6b63f84b3bc9d38963933ff511b319b73ac5?diff=split#diff-fb901db253d2190ed5dec3508eb32e99524e0b4dcacdaea322a50f2619ae2d99L-1", + "lines": [ + { + "line": "const user = await User.findOne({ slug: req.params.user }).populate('likes')\n", + "lineNumber": 47, + "lineChange": "removed" + }, + { + "line": "const user = await User.findOne({ _id: req.user._id }).populate('likes')\n", + "lineNumber": 47, + "lineChange": "added" + } + ] + }, + { + "commitURL": "https://github.com/mercmobily/hotplate/commit/c9dfbe8bf6bfd03838946d0898978543589a5ea2?diff=split#diff-bdb0afd700d4dfe1801bcfe39008d413182be643063835d326641fcce15b969aL-1", + "lines": [ + { + "line": " Workspace.findOne({ _id: req.params.workspaceId }, function( err, doc ){\n", + "lineNumber": 270, + "lineChange": "removed" + }, + { + "line": " workspaces.findOne({ _id: ObjectId(req.params.workspaceId) }, function( err, doc ){\n", + "lineNumber": 264, + "lineChange": "added" + }, + { + "line": " resUtils.checkFindOneResponse( err, doc, next, function(){\n", + "lineNumber": 271, + "lineChange": "none" + }, + { + "line": "\n", + "lineNumber": 272, + "lineChange": "none" + }, + { + "line": " perms.checkPermissions( req, next, 'workspaceConfig/get', req.body, doc, function(){\n", + "lineNumber": 273, + "lineChange": "none" + }, + { + "line": " sendResponse( res, 
doc.settings );\n", + "lineNumber": 274, + "lineChange": "none" + }, + { + "line": " });\n", + "lineNumber": 275, + "lineChange": "none" + }, + { + "line": " });\n", + "lineNumber": 276, + "lineChange": "none" + }, + { + "line": "});\n", + "lineNumber": 277, + "lineChange": "none" + } + ] + }, + { + "commitURL": "https://github.com/JasonEtco/flintcms/commit/4ae34238ce39fde00dfa15082397541758c07af1?diff=split#diff-9abe922e7535c6f75fba7150a7a803a93be7ae235564b86f799db9f37e4c1674L-1", + "lines": [ + { + "line": "const token = req.query.t\n", + "lineNumber": 103, + "lineChange": "removed" + }, + { + "line": "const token = req.query.t.toString()\n", + "lineNumber": 103, + "lineChange": "added" + }, + { + "line": "const user = await User.findOne({ token })\n", + "lineNumber": 104, + "lineChange": "none" + } + ] + } + ], + "exampleCommitDescriptions": [], + "precision": "very-high", + "repoDatasetSize": 30, + "cwe": ["CWE-943"] + } + } + ] + } + }, + "results": [ + { + "ruleId": "javascript/NoSqli", + "ruleIndex": 0, + "level": "error", + "message": { + "text": "Unsanitized input from the HTTP request body flows into findOne, where it is used in an NoSQL query. This may result in an NoSQL Injection vulnerability.", + "markdown": "Unsanitized input from {0} {1} into {2}, where it is used in an NoSQL query. 
This may result in an NoSQL Injection vulnerability.", + "arguments": [ + "[the HTTP request body](0)", + "[flows](1),(2),(3),(4),(5),(6)", + "[findOne](7)" + ] + }, + "locations": [ + { + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 18, + "endLine": 18, + "startColumn": 26, + "endColumn": 33 + } + } + } + ], + "fingerprints": { + "0": "d3e6d95802bfa65cdee1cc840eda6a7b8422f24962e436dd01730e6116e317ec", + "1": "93652555.4773f344.07efaa4d.9163ada2.ef9f7d82.34a4d81a.df7e59ba.d66579bf.f759b1f9.706318d0.07efaa4d.08906714.79a7d027.847dd466.0334236c.041df0b3" + }, + "codeFlows": [ + { + "threadFlows": [ + { + "locations": [ + { + "location": { + "id": 0, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 16, + "endLine": 16, + "startColumn": 20, + "endColumn": 24 + } + } + } + }, + { + "location": { + "id": 1, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 16, + "endLine": 16, + "startColumn": 20, + "endColumn": 24 + } + } + } + }, + { + "location": { + "id": 2, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 16, + "endLine": 16, + "startColumn": 16, + "endColumn": 24 + } + } + } + }, + { + "location": { + "id": 3, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 16, + "endLine": 16, + "startColumn": 11, + "endColumn": 13 + } + } + } + }, + { + "location": { + "id": 4, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 18, + "endLine": 18, + "startColumn": 41, + "endColumn": 
43 + } + } + } + }, + { + "location": { + "id": 5, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 18, + "endLine": 18, + "startColumn": 36, + "endColumn": 39 + } + } + } + }, + { + "location": { + "id": 6, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 18, + "endLine": 18, + "startColumn": 34, + "endColumn": 45 + } + } + } + }, + { + "location": { + "id": 7, + "physicalLocation": { + "artifactLocation": { + "uri": "routes/likeProductReviews.ts", + "uriBaseId": "%SRCROOT%" + }, + "region": { + "startLine": 18, + "endLine": 18, + "startColumn": 26, + "endColumn": 33 + } + } + } + } + ] + } + ] + } + ], + "properties": { + "priorityScore": 802, + "priorityScoreFactors": [ + { + "label": true, + "type": "multipleOccurrence" + }, + { + "label": true, + "type": "hotFileSource" + }, + { + "label": true, + "type": "fixExamples" + } + ], + "isAutofixable": false + } + } + ], + "properties": { + "coverage": [ + { + "isSupported": true, + "lang": "Python", + "files": 1, + "type": "SUPPORTED" + }, + { + "isSupported": true, + "lang": "TypeScript", + "files": 536, + "type": "SUPPORTED" + }, + { + "isSupported": true, + "lang": "HTML", + "files": 73, + "type": "SUPPORTED" + }, + { + "isSupported": true, + "lang": "XML", + "files": 5, + "type": "SUPPORTED" + }, + { + "isSupported": true, + "lang": "JavaScript", + "files": 15, + "type": "SUPPORTED" + }, + { + "isSupported": false, + "lang": "XML", + "files": 2, + "type": "FAILED_PARSING" + }, + { + "isSupported": false, + "lang": "Python", + "files": 2, + "type": "FAILED_PARSING" + }, + { + "isSupported": false, + "lang": "TypeScript", + "files": 35, + "type": "FAILED_PARSING" + } + ] + } + } + ] +} diff --git a/cliv2/go.mod b/cliv2/go.mod index d017d94036..35de902e5a 100644 --- a/cliv2/go.mod +++ b/cliv2/go.mod @@ -13,15 +13,15 
@@ require ( github.com/pkg/errors v0.9.1 github.com/rs/zerolog v1.33.0 github.com/snyk/cli-extension-dep-graph v0.0.0-20240426125928-8d56ac52821e - github.com/snyk/cli-extension-iac-rules v0.0.0-20240422133948-ae17a4306672 - github.com/snyk/cli-extension-sbom v0.0.0-20240812130014-3f4e892f15ec - github.com/snyk/container-cli v0.0.0-20240322120441-6d9b9482f9b1 + github.com/snyk/cli-extension-iac-rules v0.0.0-20241008152401-24c8cf03a1a3 + github.com/snyk/cli-extension-sbom v0.0.0-20240820111700-68258cba52c7 + github.com/snyk/container-cli v0.0.0-20240821111304-7ca1c415a5d7 github.com/snyk/error-catalog-golang-public v0.0.0-20240809094525-c48d19c27edb - github.com/snyk/go-application-framework v0.0.0-20240822160346-edf22a8795be + github.com/snyk/go-application-framework v0.0.0-20241009095349-dc0fb55f3eb3 github.com/snyk/go-httpauth v0.0.0-20240307114523-1f5ea3f55c65 github.com/snyk/snyk-iac-capture v0.6.5 - github.com/snyk/snyk-ls v0.0.0-20240822130022-1f0ca57c12f0 - github.com/spf13/cobra v1.8.0 + github.com/snyk/snyk-ls v0.0.0-20241009134219-56a46746f0be + github.com/spf13/cobra v1.8.1 github.com/spf13/pflag v1.0.5 github.com/stretchr/testify v1.9.0 ) @@ -33,6 +33,8 @@ require ( cloud.google.com/go/compute/metadata v0.5.0 // indirect cloud.google.com/go/iam v1.2.0 // indirect cloud.google.com/go/storage v1.43.0 // indirect + cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79 // indirect + cuelang.org/go v0.10.0 // indirect dario.cat/mergo v1.0.1 // indirect github.com/Microsoft/go-winio v0.6.2 // indirect github.com/OneOfOne/xxhash v1.2.8 // indirect @@ -56,6 +58,7 @@ require ( github.com/charmbracelet/bubbletea v0.23.1 // indirect github.com/charmbracelet/lipgloss v0.10.0 // indirect github.com/cloudflare/circl v1.3.8 // indirect + github.com/cockroachdb/apd/v3 v3.2.1 // indirect github.com/containerd/console v1.0.3 // indirect github.com/creachadair/jrpc2 v1.2.1 // indirect github.com/creachadair/mds v0.16.0 // indirect @@ -64,6 +67,7 @@ require ( 
github.com/deepmap/oapi-codegen v1.16.3 // indirect github.com/denisbrodbeck/machineid v1.0.1 // indirect github.com/docker/distribution v2.8.2+incompatible // indirect + github.com/emicklei/proto v1.13.2 // indirect github.com/emirpasic/gods v1.18.1 // indirect github.com/erikgeiser/promptkit v0.8.0 // indirect github.com/erni27/imcache v1.2.0 // indirect @@ -93,9 +97,9 @@ require ( github.com/hashicorp/errwrap v1.1.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect github.com/hashicorp/go-getter v1.7.5 // indirect - github.com/hashicorp/go-hclog v1.5.0 // indirect + github.com/hashicorp/go-hclog v1.6.3 // indirect github.com/hashicorp/go-multierror v1.1.1 // indirect - github.com/hashicorp/go-retryablehttp v0.7.1 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-safetemp v1.0.0 // indirect github.com/hashicorp/go-uuid v1.0.3 // indirect github.com/hashicorp/go-version v1.7.0 // indirect @@ -138,6 +142,7 @@ require ( github.com/olekukonko/tablewriter v0.0.5 // indirect github.com/open-policy-agent/opa v0.51.0 // indirect github.com/opencontainers/go-digest v1.0.0 // indirect + github.com/opencontainers/image-spec v1.1.0 // indirect github.com/otiai10/copy v1.14.0 // indirect github.com/pact-foundation/pact-go/v2 v2.0.5 // indirect github.com/pelletier/go-toml/v2 v2.2.2 // indirect @@ -147,10 +152,12 @@ require ( github.com/pjbgf/sha1cd v0.3.0 // indirect github.com/pkg/browser v0.0.0-20240102092130-5ac0b6a4141c // indirect github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect + github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0 // indirect github.com/puzpuzpuz/xsync v1.5.2 // indirect github.com/puzpuzpuz/xsync/v3 v3.4.0 // indirect github.com/rcrowley/go-metrics v0.0.0-20201227073835-cf1acfcdf475 // indirect github.com/rivo/uniseg v0.4.7 // indirect + github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21 // indirect 
github.com/sabhiram/go-gitignore v0.0.0-20210923224102-525f6e181f06 // indirect github.com/sagikazarmark/locafero v0.4.0 // indirect github.com/sagikazarmark/slog-shim v0.1.0 // indirect @@ -158,7 +165,7 @@ require ( github.com/shirou/gopsutil v3.21.11+incompatible // indirect github.com/skeema/knownhosts v1.2.2 // indirect github.com/snyk/code-client-go v1.10.0 // indirect - github.com/snyk/policy-engine v0.30.11 // indirect + github.com/snyk/policy-engine v0.31.3 // indirect github.com/sourcegraph/conc v0.3.0 // indirect github.com/sourcegraph/go-lsp v0.0.0-20240223163137-f80c5dd31dfd // indirect github.com/spf13/afero v1.11.0 // indirect @@ -170,6 +177,7 @@ require ( github.com/tklauser/numcpus v0.8.0 // indirect github.com/ulikunitz/xz v0.5.12 // indirect github.com/vincent-petithory/dataurl v1.0.0 // indirect + github.com/writeas/go-strip-markdown v2.0.1+incompatible // indirect github.com/xanzy/ssh-agent v0.3.3 // indirect github.com/xeipuuv/gojsonpointer v0.0.0-20190905194746-02993c407bfb // indirect github.com/xeipuuv/gojsonreference v0.0.0-20180127040603-bd5ef7bd5415 // indirect diff --git a/cliv2/go.sum b/cliv2/go.sum index 1921fb61d3..e1e0cc9553 100644 --- a/cliv2/go.sum +++ b/cliv2/go.sum @@ -188,6 +188,10 @@ cloud.google.com/go/webrisk v1.4.0/go.mod h1:Hn8X6Zr+ziE2aNd8SliSDWpEnSS1u4R9+xX cloud.google.com/go/webrisk v1.5.0/go.mod h1:iPG6fr52Tv7sGk0H6qUFzmL3HHZev1htXuWDEEsqMTg= cloud.google.com/go/workflows v1.6.0/go.mod h1:6t9F5h/unJz41YqfBmqSASJSXccBLtD1Vwf+KmJENM0= cloud.google.com/go/workflows v1.7.0/go.mod h1:JhSrZuVZWuiDfKEFxU0/F1PQjmpnpcoISEXH2bcHC3M= +cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79 h1:EceZITBGET3qHneD5xowSTY/YHbNybvMWGh62K2fG/M= +cuelabs.dev/go/oci/ociregistry v0.0.0-20240807094312-a32ad29eed79/go.mod h1:5A4xfTzHTXfeVJBU6RAUf+QrlfTCW+017q/QiW+sMLg= +cuelang.org/go v0.10.0 h1:Y1Pu4wwga5HkXfLFK1sWAYaSWIBdcsr5Cb5AWj2pOuE= +cuelang.org/go v0.10.0/go.mod h1:HzlaqqqInHNiqE6slTP6+UtxT9hN6DAzgJgdbNxXvX8= dario.cat/mergo 
v1.0.1 h1:Ra4+bf83h2ztPIQYNP99R6m+Y7KfnARDfID+a+vLl4s= dario.cat/mergo v1.0.1/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= @@ -284,10 +288,12 @@ github.com/cncf/xds/go v0.0.0-20210805033703-aa0b78936158/go.mod h1:eXthEFrGJvWH github.com/cncf/xds/go v0.0.0-20210922020428-25de7278fc84/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211001041855-01bcc9b48dfe/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= github.com/cncf/xds/go v0.0.0-20211011173535-cb28da3451f1/go.mod h1:eXthEFrGJvWHgFFCl3hGmgk+/aYT6PnTQLykKQRLhEs= +github.com/cockroachdb/apd/v3 v3.2.1 h1:U+8j7t0axsIgvQUqthuNm82HIrYXodOV2iWLWtEaIwg= +github.com/cockroachdb/apd/v3 v3.2.1/go.mod h1:klXJcjp+FffLTHlhIG69tezTDvdP065naDsHzKhYSqc= github.com/containerd/console v1.0.3 h1:lIr7SlA5PxZyMV30bDW0MGbiOPXwc63yRuCP0ARubLw= github.com/containerd/console v1.0.3/go.mod h1:7LqA/THxQ86k76b8c/EMSiaJ3h1eZkMkXar0TQ1gf3U= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= -github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= +github.com/cpuguy83/go-md2man/v2 v2.0.4/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creachadair/jrpc2 v1.2.1 h1:eIgmguoqLbEjn4Pb/XYMg5U1PhVpAClGdhI/Q4gfC5o= github.com/creachadair/jrpc2 v1.2.1/go.mod h1:RvEKAYVpDBKn3YWlTVQJIFmxG5GuLD7ztp9FMTJx8eI= github.com/creachadair/mds v0.16.0 h1:v6DlvKXClowXFg4hkjLCR1FEFiREMf0qgX+Lm5GsEKk= @@ -317,6 +323,8 @@ github.com/elazarl/goproxy v0.0.0-20231031074852-3ec07828be7a/go.mod h1:Ro8st/El github.com/elazarl/goproxy/ext v0.0.0-20190711103511-473e67f1d7d2/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8= github.com/elazarl/goproxy/ext v0.0.0-20230808193330-2592e75ae04a h1:6hp3+W5oJSkbk/m2XquFdhih2H4wxxR0Nl6GfPL8kss= github.com/elazarl/goproxy/ext 
v0.0.0-20230808193330-2592e75ae04a/go.mod h1:gNh8nYJoAm43RfaxurUnxr+N1PwuFV3ZMl/efxlIlY8= +github.com/emicklei/proto v1.13.2 h1:z/etSFO3uyXeuEsVPzfl56WNgzcvIr42aQazXaQmFZY= +github.com/emicklei/proto v1.13.2/go.mod h1:rn1FgRS/FANiZdD2djyH7TMA9jdRDcYQ9IEN9yvjX0A= github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= @@ -388,6 +396,8 @@ github.com/go-openapi/jsonpointer v0.21.0 h1:YgdVicSA9vH5RiHs9TZW5oyafXZFc6+2Vc1 github.com/go-openapi/jsonpointer v0.21.0/go.mod h1:IUyH9l/+uyhIYQ/PXVA41Rexl+kOkAPDdXEYns6fzUY= github.com/go-openapi/swag v0.23.0 h1:vsEVJDUo2hPJ2tu0/Xc+4noaxyEffXNIs3cOULZ+GrE= github.com/go-openapi/swag v0.23.0/go.mod h1:esZ8ITTYEsH1V2trKHjAN8Ai7xHb8RV+YSZ577vPjgQ= +github.com/go-quicktest/qt v1.101.0 h1:O1K29Txy5P2OK0dGo59b7b0LR6wKfIhttaAhHUyn7eI= +github.com/go-quicktest/qt v1.101.0/go.mod h1:14Bz/f7NwaXPtdYEgzsx46kqSxVwTbzVZsDC26tQJow= github.com/go-test/deep v1.0.1/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA= github.com/go-test/deep v1.0.8 h1:TDsG77qcSprGbC6vTN8OuXp5g+J+b5Pcguhf7Zt61VM= github.com/go-test/deep v1.0.8/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE= @@ -522,13 +532,12 @@ github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9n github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= github.com/hashicorp/go-getter v1.7.5 h1:dT58k9hQ/vbxNMwoI5+xFYAJuv6152UNvdHokfI5wE4= github.com/hashicorp/go-getter v1.7.5/go.mod h1:W7TalhMmbPmsSMdNjD0ZskARur/9GJ17cfHTRtXV744= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= 
+github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-retryablehttp v0.7.1 h1:sUiuQAnLlbvmExtFQs72iFW/HXeUn8Z1aJLQ4LJJbTQ= -github.com/hashicorp/go-retryablehttp v0.7.1/go.mod h1:vAew36LZh98gCBJNLH42IQ1ER/9wtLZZ8meHqQvEYWY= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= github.com/hashicorp/go-uuid v1.0.2/go.mod h1:6SBZvOh/SIDV7/2o3Jml5SYk/TvGqwFJ/bN7x4byOro= @@ -597,6 +606,8 @@ github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= github.com/kylelemons/godebug v0.0.0-20170820004349-d65d576e9348/go.mod h1:B69LEHPfb2qLo0BaaOLcbitczOKLWTsrBG9LczfCD4k= github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= +github.com/lib/pq v1.10.7 h1:p7ZhMD+KsSRozJr34udlUrhboJwWAgCg34+/ZZNvZZw= +github.com/lib/pq v1.10.7/go.mod h1:AlVN5x4E4T544tWzH6hKfbfQvm3HdbOxrmggDNAPY9o= github.com/lucasb-eyer/go-colorful v1.2.0 h1:1nnpGOrhyZZuNyfu1QjKiUICQ74+3FNCN69Aj6K7nkY= github.com/lucasb-eyer/go-colorful v1.2.0/go.mod h1:R4dSotOR9KMtayYi1e77YzuveK+i7ruzyGqttikkLy0= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -666,6 +677,8 @@ github.com/open-policy-agent/opa v0.51.0 h1:2hS5xhos8HtkN+mgpqMhNJSFtn/1n/h3wh+A github.com/open-policy-agent/opa 
v0.51.0/go.mod h1:OjmwLfXdeR7skSxrt8Yd3ScXTqPxyJn7GeTRJrcEerU= github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U= github.com/opencontainers/go-digest v1.0.0/go.mod h1:0JzlMkj0TRzQZfJkVvzbP0HBR3IKzErnv2BNG4W4MAM= +github.com/opencontainers/image-spec v1.1.0 h1:8SG7/vwALn54lVB/0yZ/MMwhFrPYtpEHQb2IpWsCzug= +github.com/opencontainers/image-spec v1.1.0/go.mod h1:W4s4sFTMaBeK1BQLXbG4AdM2szdn85PY75RI83NrTrM= github.com/otiai10/copy v1.14.0 h1:dCI/t1iTdYGtkvCuBG2BgR6KZa83PTclw4U5n2wAllU= github.com/otiai10/copy v1.14.0/go.mod h1:ECfuL02W+/FkTWZWgQqXPWZgW9oeKCSQ5qVfSc4qc4w= github.com/otiai10/mint v1.5.1 h1:XaPLeE+9vGbuyEHem1JNk3bYc7KKqyI/na0/mLd/Kks= @@ -700,6 +713,8 @@ github.com/prometheus/common v0.37.0 h1:ccBbHCgIiT9uSoFY0vX8H3zsNR5eLt17/RQLUvn8 github.com/prometheus/common v0.37.0/go.mod h1:phzohg0JFMnBEFGxTDbfu3QyL5GI8gTQJFhYO5B3mfA= github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5mo= github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4= +github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0 h1:sadMIsgmHpEOGbUs6VtHBXRR1OHevnj7hLx9ZcdNGW4= +github.com/protocolbuffers/txtpbfmt v0.0.0-20230328191034-3462fbc510c0/go.mod h1:jgxiZysxFPM+iWKwQwPR+y+Jvo54ARd4EisXxKYpB5c= github.com/puzpuzpuz/xsync v1.5.2 h1:yRAP4wqSOZG+/4pxJ08fPTwrfL0IzE/LKQ/cw509qGY= github.com/puzpuzpuz/xsync v1.5.2/go.mod h1:K98BYhX3k1dQ2M63t1YNVDanbwUPmBCAhNmVrrxfiGg= github.com/puzpuzpuz/xsync/v3 v3.4.0 h1:DuVBAdXuGFHv8adVXjWWZ63pJq+NRXOWVXlKDBZ+mJ4= @@ -713,8 +728,8 @@ github.com/rivo/uniseg v0.4.7/go.mod h1:FN3SvrM+Zdj16jyLfmOkMNblXMcoc8DfTHruCPUc github.com/rogpeppe/fastuuid v1.2.0/go.mod h1:jVj6XXZzXRy/MSR5jhDC/2q6DgLz+nrA6LYCDYWNEvQ= github.com/rogpeppe/go-charset v0.0.0-20180617210344-2471d30d28b4/go.mod h1:qgYeAmZ5ZIpBWTGllZSQnw97Dj+woV0toclVaRGI8pc= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= 
-github.com/rogpeppe/go-internal v1.12.0 h1:exVL4IDcn6na9z1rAb56Vxr+CgyK3nn3O+epU5NdKM8= -github.com/rogpeppe/go-internal v1.12.0/go.mod h1:E+RYuTGaKKdloAfM02xzb0FW3Paa99yedzYV+kq4uf4= +github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21 h1:igWZJluD8KtEtAgRyF4x6lqcxDry1ULztksMJh2mnQE= +github.com/rogpeppe/go-internal v1.12.1-0.20240709150035-ccf4b4329d21/go.mod h1:RMRJLmBOqWacUkmJHRMiPKh1S1m3PA7Zh4W80/kWPpg= github.com/rs/xid v1.5.0/go.mod h1:trrq9SKmegXys3aeAKXMUTdJsYXVwGY3RLcfgqegfbg= github.com/rs/zerolog v1.33.0 h1:1cU2KZkvPxNyfgEmhHAz/1A9Bz+llsdYzklWFzgp0r8= github.com/rs/zerolog v1.33.0/go.mod h1:/7mN4D5sKwJLZQ2b/znpjC3/GQWY/xaDXUM0kKWRHss= @@ -737,26 +752,26 @@ github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/snyk/cli-extension-dep-graph v0.0.0-20240426125928-8d56ac52821e h1:j17Ujw51/2SC3m1hbNCUwxFc8aNIFyfpnwFAszgEM8c= github.com/snyk/cli-extension-dep-graph v0.0.0-20240426125928-8d56ac52821e/go.mod h1:QF3v8HBpOpyudYNCuR8LqfULutO76c91sBdLzD+pBJU= -github.com/snyk/cli-extension-iac-rules v0.0.0-20240422133948-ae17a4306672 h1:AkLej8Lk//vFex1fiygSYFrQTUd0xP+GyRbsI+m2kwQ= -github.com/snyk/cli-extension-iac-rules v0.0.0-20240422133948-ae17a4306672/go.mod h1:2vKTUsW73sVbDcyD19iNLfN0so2GSu9BE3k/fqG0mjA= -github.com/snyk/cli-extension-sbom v0.0.0-20240812130014-3f4e892f15ec h1:xkRPBjcqXwRnWCYCoybM9PO2J/hYn8txm/wzwnDImZg= -github.com/snyk/cli-extension-sbom v0.0.0-20240812130014-3f4e892f15ec/go.mod h1:5CaY1bgvJY/uoG/1plLOf8T8o9AkwoBIGvw34RfRLZw= +github.com/snyk/cli-extension-iac-rules v0.0.0-20241008152401-24c8cf03a1a3 h1:AQMi52/aevl9bBSzwxGLz9kxInojkSe/Q6j1s1s6yJg= +github.com/snyk/cli-extension-iac-rules v0.0.0-20241008152401-24c8cf03a1a3/go.mod h1:A/DNK3ZnUgqOKJ33Lc1z5KbbHqRSBgwCWw9KuyJu0xQ= +github.com/snyk/cli-extension-sbom v0.0.0-20240820111700-68258cba52c7 h1:+xhigV8lkriZ8riIg79Yx/sDpKZV9ihz2iAM0Xa8/V4= 
+github.com/snyk/cli-extension-sbom v0.0.0-20240820111700-68258cba52c7/go.mod h1:5CaY1bgvJY/uoG/1plLOf8T8o9AkwoBIGvw34RfRLZw= github.com/snyk/code-client-go v1.10.0 h1:t/hBINxj4lKvoo681uGhxHBpMued/j68p2sHbB9qbfo= github.com/snyk/code-client-go v1.10.0/go.mod h1:orU911flV1kJQOlxxx0InUQkAfpBrcERsb2olfnlI8s= -github.com/snyk/container-cli v0.0.0-20240322120441-6d9b9482f9b1 h1:9RKY9NdX5DrJAoVXDP0JiqrXT+4Nb9NH8pjEcA0NsLA= -github.com/snyk/container-cli v0.0.0-20240322120441-6d9b9482f9b1/go.mod h1:38w+dcAQp9eG3P5t2eNS9eG0reut10AeJjLv5lJ5lpM= +github.com/snyk/container-cli v0.0.0-20240821111304-7ca1c415a5d7 h1:Zn5BcV76oFAbJm5tDygU945lvoZ3yY8FoRFDC3YpwF8= +github.com/snyk/container-cli v0.0.0-20240821111304-7ca1c415a5d7/go.mod h1:38w+dcAQp9eG3P5t2eNS9eG0reut10AeJjLv5lJ5lpM= github.com/snyk/error-catalog-golang-public v0.0.0-20240809094525-c48d19c27edb h1:w9tJhpTFxWqAhLeraGsMExDjGK9x5Dwj1NRFwb+t+QE= github.com/snyk/error-catalog-golang-public v0.0.0-20240809094525-c48d19c27edb/go.mod h1:Ytttq7Pw4vOCu9NtRQaOeDU2dhBYUyNBe6kX4+nIIQ4= -github.com/snyk/go-application-framework v0.0.0-20240822160346-edf22a8795be h1:5m55hR2Vmvx4dqeWNOjRwppAOBvmTM/BodJCHAbFoEY= -github.com/snyk/go-application-framework v0.0.0-20240822160346-edf22a8795be/go.mod h1:zgYTVG71nX7zTb3ELeRlnwE/uKQxeOyQmAHtg6bC4uU= +github.com/snyk/go-application-framework v0.0.0-20241009095349-dc0fb55f3eb3 h1:aUFtOsdCHfiwb7LJV8jh+xjich9VpAczNtuMtij7CtM= +github.com/snyk/go-application-framework v0.0.0-20241009095349-dc0fb55f3eb3/go.mod h1:LeMsRM1FxIfO/8QpOs9V/dI46ie/RAQl02ulAh6aKys= github.com/snyk/go-httpauth v0.0.0-20240307114523-1f5ea3f55c65 h1:CEQuYv0Go6MEyRCD3YjLYM2u3Oxkx8GpCpFBd4rUTUk= github.com/snyk/go-httpauth v0.0.0-20240307114523-1f5ea3f55c65/go.mod h1:88KbbvGYlmLgee4OcQ19yr0bNpXpOr2kciOthaSzCAg= -github.com/snyk/policy-engine v0.30.11 h1:wUy5LMar2vccMbNM62MSBRdjAQAhAbIm7aNXXO+g2tk= -github.com/snyk/policy-engine v0.30.11/go.mod h1:O6nwYXbb+SNDrYVWBwkieOwVuwXNKBHuXLm//fLz9Dw= +github.com/snyk/policy-engine 
v0.31.3 h1:FepCg6QN/X8uvxYjF+WwB2aiBPJB+NENDgKQeI/FwLg= +github.com/snyk/policy-engine v0.31.3/go.mod h1:Z9/hcngz+2txX4QfQRwfODk8F7w4mr/IQOvCtIosnLo= github.com/snyk/snyk-iac-capture v0.6.5 h1:992DXCAJSN97KtUh8T5ndaWwd/6ZCal2bDkRXqM1u/E= github.com/snyk/snyk-iac-capture v0.6.5/go.mod h1:e47i55EmM0F69ZxyFHC4sCi7vyaJW6DLoaamJJCzWGk= -github.com/snyk/snyk-ls v0.0.0-20240822130022-1f0ca57c12f0 h1:HL+leNnMl/K3S8eDmkmVwzpYrVd8lZVYZ4Mf7HL4qFM= -github.com/snyk/snyk-ls v0.0.0-20240822130022-1f0ca57c12f0/go.mod h1:bOm4GcjzuUTIKDGQ6g780lrNiaJjHo9fPYXz6gvlqUk= +github.com/snyk/snyk-ls v0.0.0-20241009134219-56a46746f0be h1:QBKSlJktuoeTiXMt6IftT8Q4nehdaZPodl/bwhbD78I= +github.com/snyk/snyk-ls v0.0.0-20241009134219-56a46746f0be/go.mod h1:ymVHnn1JE/pqWhTHSQEz/1MP8FmYYfYFszaptaaI/PE= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= github.com/sourcegraph/conc v0.3.0/go.mod h1:Sdozi7LEKbFPqYX2/J+iBAM6HpqSLTASQIKqDmF7Mt0= github.com/sourcegraph/go-lsp v0.0.0-20240223163137-f80c5dd31dfd h1:Dq5WSzWsP1TbVi10zPWBI5LKEBDg4Y1OhWEph1wr5WQ= @@ -766,8 +781,8 @@ github.com/spf13/afero v1.11.0 h1:WJQKhtpdm3v2IzqG8VMqrr6Rf3UYpEF239Jy9wNepM8= github.com/spf13/afero v1.11.0/go.mod h1:GH9Y3pIexgf1MTIWtNGyogA5MwRIDXGUr+hbWNoBjkY= github.com/spf13/cast v1.6.0 h1:GEiTHELF+vaR5dhz3VqZfFSzZjYbgeKDpBxQVS4GYJ0= github.com/spf13/cast v1.6.0/go.mod h1:ancEpBxwJDODSW/UG4rDrAqiKolqNNh2DX3mk86cAdo= -github.com/spf13/cobra v1.8.0 h1:7aJaZx1B85qltLMc546zn58BxxfZdR/W22ej9CFoEf0= -github.com/spf13/cobra v1.8.0/go.mod h1:WXLWApfZ71AjXPya3WOlMsY9yMs7YeiHhFVlvLyhcho= +github.com/spf13/cobra v1.8.1 h1:e5/vxKd/rZsfSJMUX1agtjeTDf+qv1/JdBF8gg5k9ZM= +github.com/spf13/cobra v1.8.1/go.mod h1:wHxEcudfqmLYa8iTfL+OuZPbBZkmvliBWKIezN3kD9Y= github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA= github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg= github.com/spf13/viper v1.18.2 h1:LUXCnvUvSM6FXAsj6nnfc8Q2tp1dIgUfY9Kc8GsSOiQ= @@ -815,6 +830,8 
@@ github.com/ulikunitz/xz v0.5.12/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0o github.com/vincent-petithory/dataurl v1.0.0 h1:cXw+kPto8NLuJtlMsI152irrVw9fRDX8AbShPRpg2CI= github.com/vincent-petithory/dataurl v1.0.0/go.mod h1:FHafX5vmDzyP+1CQATJn7WFKc9CvnvxyvZy6I1MrG/U= github.com/vmihailenco/msgpack v3.3.3+incompatible/go.mod h1:fy3FlTQTDXWkZ7Bh6AcGMlsjHatGryHQYUTf1ShIgkk= +github.com/writeas/go-strip-markdown v2.0.1+incompatible h1:IIqxTM5Jr7RzhigcL6FkrCNfXkvbR+Nbu1ls48pXYcw= +github.com/writeas/go-strip-markdown v2.0.1+incompatible/go.mod h1:Rsyu10ZhbEK9pXdk8V6MVnZmTzRG0alMNLMwa0J01fE= github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/xeipuuv/gojsonpointer v0.0.0-20180127040702-4e3ac2762d5f/go.mod h1:N2zxlSyiKSe5eX1tZViRH5QA0qijqEDrYZiPEAiq3wU= diff --git a/cliv2/internal/cliv2/cliv2.go b/cliv2/internal/cliv2/cliv2.go index c802d1063a..5e03660d79 100644 --- a/cliv2/internal/cliv2/cliv2.go +++ b/cliv2/internal/cliv2/cliv2.go @@ -22,6 +22,7 @@ import ( "github.com/rs/zerolog" "github.com/snyk/go-application-framework/pkg/configuration" "github.com/snyk/go-application-framework/pkg/instrumentation" + "github.com/snyk/go-application-framework/pkg/runtimeinfo" "github.com/snyk/go-application-framework/pkg/utils" cli_errors "github.com/snyk/cli/cliv2/internal/errors" @@ -31,7 +32,6 @@ import ( "github.com/snyk/cli/cliv2/internal/embedded" "github.com/snyk/cli/cliv2/internal/embedded/cliv1" "github.com/snyk/cli/cliv2/internal/proxy" - local_utils "github.com/snyk/cli/cliv2/internal/utils" ) type Handler int @@ -58,14 +58,10 @@ const ( V2_ABOUT Handler = iota ) -func NewCLIv2(config configuration.Configuration, debugLogger *log.Logger) (*CLI, error) { +func NewCLIv2(config configuration.Configuration, debugLogger *log.Logger, ri runtimeinfo.RuntimeInfo) (*CLI, error) { cacheDirectory := config.GetString(configuration.CACHE_PATH) - 
v1BinaryLocation, err := cliv1.GetFullCLIV1TargetPath(cacheDirectory) - if err != nil { - fmt.Println(err) - return nil, err - } + v1BinaryLocation := path.Join(cacheDirectory, ri.GetVersion(), cliv1.GetCLIv1Filename()) cli := CLI{ DebugLogger: debugLogger, @@ -93,7 +89,7 @@ func (c *CLI) Init() (err error) { if len(c.CacheDirectory) > 0 { // ensure the specified base cache directory exists, this needs to be done even before acquiring the lock if _, err = os.Stat(c.CacheDirectory); os.IsNotExist(err) { - err = os.MkdirAll(c.CacheDirectory, local_utils.CACHEDIR_PERMISSION) + err = os.MkdirAll(c.CacheDirectory, utils.DIR_PERMISSION) if err != nil { return fmt.Errorf("Cache directory path is invalid: %w", err) } @@ -117,7 +113,7 @@ func (c *CLI) Init() (err error) { c.DebugLogger.Printf("Init-Lock acquired: %v (%s)\n", fileLock.Locked(), lockFileName) // create required cache and temp directories - err = local_utils.CreateAllDirectories(c.CacheDirectory, GetFullVersion()) + err = utils.CreateAllDirectories(c.CacheDirectory, GetFullVersion()) if err != nil { return err } @@ -218,7 +214,7 @@ func (c *CLI) GetBinaryLocation() string { } func (c *CLI) GetTempDir() string { - return local_utils.GetTemporaryDirectory(c.CacheDirectory, cliv1.CLIV1Version()) + return c.globalConfig.GetString(configuration.TEMP_DIR_PATH) } func (c *CLI) printVersion() { diff --git a/cliv2/internal/cliv2/cliv2_test.go b/cliv2/internal/cliv2/cliv2_test.go index b676a1ea2a..eb0f6b8a70 100644 --- a/cliv2/internal/cliv2/cliv2_test.go +++ b/cliv2/internal/cliv2/cliv2_test.go @@ -13,14 +13,17 @@ import ( "testing" "time" + "github.com/snyk/go-application-framework/pkg/app" "github.com/snyk/go-application-framework/pkg/configuration" + "github.com/snyk/go-application-framework/pkg/runtimeinfo" + "github.com/snyk/go-application-framework/pkg/utils" + "github.com/snyk/cli/cliv2/internal/embedded/cliv1" cli_errors "github.com/snyk/cli/cliv2/internal/errors" "github.com/snyk/cli/cliv2/internal/cliv2" 
"github.com/snyk/cli/cliv2/internal/constants" "github.com/snyk/cli/cliv2/internal/proxy" - "github.com/snyk/cli/cliv2/internal/utils" "github.com/stretchr/testify/assert" ) @@ -35,6 +38,11 @@ func getCacheDir(t *testing.T) string { return cacheDir } +func getRuntimeInfo(t *testing.T) runtimeinfo.RuntimeInfo { + t.Helper() + return runtimeinfo.New(runtimeinfo.WithVersion(cliv1.CLIV1Version())) +} + func Test_PrepareV1EnvironmentVariables_Fill_and_Filter(t *testing.T) { orgid := "orgid" testapi := "https://api.snyky.io" @@ -269,7 +277,7 @@ func Test_prepareV1Command(t *testing.T) { cacheDir := getCacheDir(t) config := configuration.NewInMemory() config.Set(configuration.CACHE_PATH, cacheDir) - cli, err := cliv2.NewCLIv2(config, discardLogger) + cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) snykCmd, err := cli.PrepareV1Command( @@ -298,7 +306,7 @@ func Test_extractOnlyOnce(t *testing.T) { assert.NoDirExists(t, tmpDir) // create instance under test - cli, err := cliv2.NewCLIv2(config, discardLogger) + cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) assert.NoError(t, cli.Init()) @@ -332,7 +340,7 @@ func Test_init_extractDueToInvalidBinary(t *testing.T) { assert.NoDirExists(t, tmpDir) // create instance under test - cli, err := cliv2.NewCLIv2(config, discardLogger) + cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) // fill binary with invalid data @@ -370,7 +378,7 @@ func Test_executeRunV2only(t *testing.T) { assert.NoDirExists(t, tmpDir) // create instance under test - cli, err := cliv2.NewCLIv2(config, discardLogger) + cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) assert.NoError(t, cli.Init()) @@ -387,7 +395,7 @@ func Test_executeUnknownCommand(t *testing.T) { config.Set(configuration.CACHE_PATH, cacheDir) // create instance under test - cli, err := cliv2.NewCLIv2(config, discardLogger) + 
cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) assert.NoError(t, cli.Init()) @@ -397,11 +405,12 @@ func Test_executeUnknownCommand(t *testing.T) { func Test_clearCache(t *testing.T) { cacheDir := getCacheDir(t) - config := configuration.NewInMemory() + engine := app.CreateAppEngine() + config := engine.GetConfiguration() config.Set(configuration.CACHE_PATH, cacheDir) // create instance under test - cli, _ := cliv2.NewCLIv2(config, discardLogger) + cli, _ := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.Nil(t, cli.Init()) // create folders and files in cache dir @@ -432,11 +441,12 @@ func Test_clearCache(t *testing.T) { func Test_clearCacheBigCache(t *testing.T) { cacheDir := getCacheDir(t) - config := configuration.NewInMemory() + engine := app.CreateAppEngine() + config := engine.GetConfiguration() config.Set(configuration.CACHE_PATH, cacheDir) // create instance under test - cli, err := cliv2.NewCLIv2(config, discardLogger) + cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) assert.NoError(t, cli.Init()) @@ -477,7 +487,7 @@ func Test_setTimeout(t *testing.T) { t.Skip("Skipping test on windows") } config := configuration.NewInMemory() - cli, err := cliv2.NewCLIv2(config, discardLogger) + cli, err := cliv2.NewCLIv2(config, discardLogger, getRuntimeInfo(t)) assert.NoError(t, err) config.Set(configuration.TIMEOUT, 1) diff --git a/cliv2/internal/constants/constants.go b/cliv2/internal/constants/constants.go index c06f4808cd..185ad09b35 100644 --- a/cliv2/internal/constants/constants.go +++ b/cliv2/internal/constants/constants.go @@ -19,6 +19,7 @@ const SNYK_NPM_ALL_PROXY = "ALL_PROXY" const SNYK_CA_CERTIFICATE_LOCATION_ENV = "NODE_EXTRA_CA_CERTS" const SNYK_INTERNAL_NO_PROXY = "localhost,127.0.0.1,::1" const SNYK_OAUTH_ACCESS_TOKEN_ENV = "SNYK_OAUTH_TOKEN" +const SNYK_API_TOKEN_ENV = "SNYK_TOKEN" const SNYK_ANALYTICS_DISABLED_ENV = "SNYK_DISABLE_ANALYTICS" const 
SNYK_INTERNAL_ORGID_ENV = "SNYK_INTERNAL_ORGID" const SNYK_INTERNAL_PREVIEW_FEATURES_ENABLED = "SNYK_INTERNAL_PREVIEW_FEATURES" diff --git a/cliv2/internal/embedded/cliv1/cliv1.go b/cliv2/internal/embedded/cliv1/cliv1.go index fa6b3a924e..fe1c107235 100644 --- a/cliv2/internal/embedded/cliv1/cliv1.go +++ b/cliv2/internal/embedded/cliv1/cliv1.go @@ -2,11 +2,9 @@ package cliv1 import ( _ "embed" - "path" "strings" "github.com/snyk/cli/cliv2/internal/embedded" - "github.com/snyk/cli/cliv2/internal/utils" ) // The actual version gets injected at build time @@ -16,15 +14,6 @@ func CLIV1Version() string { return strings.TrimSpace(snykCLIVersion) } -// Get the full path to where we expect the CLIv1 to be in the cache -// If it doesn't exist, this is the path where we will then extract it -func GetFullCLIV1TargetPath(cacheDir string) (string, error) { - cliv1Filename := getCLIv1Filename() - versionTag := CLIV1Version() - fullPath := path.Join(utils.GetVersionCacheDirectory(cacheDir, versionTag), cliv1Filename) - return fullPath, nil -} - func ExtractTo(targetFullPath string) error { return embedded.ExtractBytesToTarget(snykCLIBytes, targetFullPath) } diff --git a/cliv2/internal/embedded/cliv1/dummy_embedded_legacy_cli.go b/cliv2/internal/embedded/cliv1/dummy_embedded_legacy_cli.go index 172016b75e..2a1c679120 100644 --- a/cliv2/internal/embedded/cliv1/dummy_embedded_legacy_cli.go +++ b/cliv2/internal/embedded/cliv1/dummy_embedded_legacy_cli.go @@ -10,7 +10,7 @@ import ( var snykCLIBytes []byte = []byte("\n") -func getCLIv1Filename() string { +func GetCLIv1Filename() string { return "FILENAME" } diff --git a/cliv2/internal/embedded/cliv1/embedded_binary_template.txt b/cliv2/internal/embedded/cliv1/embedded_binary_template.txt index 63a6cdf253..77028a70e9 100644 --- a/cliv2/internal/embedded/cliv1/embedded_binary_template.txt +++ b/cliv2/internal/embedded/cliv1/embedded_binary_template.txt @@ -8,7 +8,7 @@ import ( //go:embed FILENAME var snykCLIBytes []byte -func 
getCLIv1Filename() string { +func GetCLIv1Filename() string { return "FILENAME" } diff --git a/cliv2/internal/proxy/proxy.go b/cliv2/internal/proxy/proxy.go index 7e945c20de..9d96a49299 100644 --- a/cliv2/internal/proxy/proxy.go +++ b/cliv2/internal/proxy/proxy.go @@ -58,6 +58,7 @@ const ( type CaData struct { CertPool *x509.CertPool CertFile string + CertPem string } func InitCA(config configuration.Configuration, cliVersion string, logger *zerolog.Logger) (*CaData, error) { @@ -70,8 +71,8 @@ func InitCA(config configuration.Configuration, cliVersion string, logger *zerol return nil, err } - tmpDirectory := utils.GetTemporaryDirectory(cacheDirectory, cliVersion) - err = utils.CreateAllDirectories(cacheDirectory, cliVersion) + tmpDirectory := config.GetString(configuration.TEMP_DIR_PATH) + err = pkg_utils.CreateAllDirectories(cacheDirectory, cliVersion) if err != nil { return nil, err } @@ -125,6 +126,7 @@ func InitCA(config configuration.Configuration, cliVersion string, logger *zerol return &CaData{ CertPool: rootCAs, CertFile: certificateLocation, + CertPem: certPEMString, }, nil } diff --git a/cliv2/internal/proxy/proxy_test.go b/cliv2/internal/proxy/proxy_test.go index 3f070dadb4..47ddd6c564 100644 --- a/cliv2/internal/proxy/proxy_test.go +++ b/cliv2/internal/proxy/proxy_test.go @@ -21,7 +21,6 @@ import ( "github.com/snyk/cli/cliv2/internal/constants" "github.com/snyk/cli/cliv2/internal/proxy" - "github.com/snyk/cli/cliv2/internal/utils" "github.com/snyk/cli/cliv2/pkg/basic_workflows" ) @@ -71,10 +70,12 @@ func helper_getHttpClient(gateway *proxy.WrapperProxy, useProxyAuth bool) (*http func setup(t *testing.T, baseCache string, version string) configuration.Configuration { t.Helper() - err := utils.CreateAllDirectories(baseCache, version) + err := gafUtils.CreateAllDirectories(baseCache, version) assert.Nil(t, err) config := configuration.NewInMemory() config.Set(configuration.CACHE_PATH, baseCache) + 
config.Set(basic_workflows.ConfigurationCleanupGlobalTempDirectory, true) + config.Set(basic_workflows.ConfigurationCleanupGlobalCertAuthority, true) caData, err = basic_workflows.GetGlobalCertAuthority(config, &debugLogger) assert.Nil(t, err) return config @@ -83,7 +84,9 @@ func setup(t *testing.T, baseCache string, version string) configuration.Configu func teardown(t *testing.T, baseCache string) { t.Helper() err := os.RemoveAll(baseCache) - basic_workflows.CleanupGlobalCertAuthority(&debugLogger) + config := setup(t, "testcache", "1.1.1") + + basic_workflows.CleanupGlobalCertAuthority(config, &debugLogger) assert.Nil(t, err) } @@ -97,7 +100,7 @@ func Test_CleanupCertFile(t *testing.T) { assert.FileExistsf(t, caData.CertFile, "CertFile exist") - basic_workflows.CleanupGlobalCertAuthority(&debugLogger) + basic_workflows.CleanupGlobalCertAuthority(config, &debugLogger) assert.NoFileExists(t, caData.CertFile, "CertFile does not exist anymore") } @@ -121,7 +124,7 @@ func Test_canGoThroughProxy(t *testing.T) { proxiedClient, err := helper_getHttpClient(wp, useProxyAuth) assert.Nil(t, err) - res, err := proxiedClient.Get("https://static.snyk.io/cli/latest/version") + res, err := proxiedClient.Get("https://downloads.snyk.io/cli/latest/version") if err != nil { t.Fatal(err) } @@ -149,7 +152,7 @@ func Test_proxyRejectsWithoutBasicAuthHeader(t *testing.T) { proxiedClient, err := helper_getHttpClient(wp, useProxyAuth) assert.Nil(t, err) - res, err := proxiedClient.Get("https://static.snyk.io/cli/latest/version") + res, err := proxiedClient.Get("https://downloads.snyk.io/cli/latest/version") assert.Nil(t, res) assert.NotNil(t, err) assert.Contains(t, err.Error(), "Proxy Authentication Required") @@ -265,7 +268,7 @@ func Test_proxyPropagatesAuthFailureHeader(t *testing.T) { proxiedClient, err := helper_getHttpClient(wp, useProxyAuth) assert.Nil(t, err) - res, err := proxiedClient.Get("https://static.snyk.io/cli/latest/version") + res, err := 
proxiedClient.Get("https://downloads.snyk.io/cli/latest/version") assert.Nil(t, err) // Assert that the proxy propagates the auth failed marker header to the response. assert.Equal(t, res.Header.Get("snyk-auth-failed"), "true") diff --git a/cliv2/internal/utils/directories.go b/cliv2/internal/utils/directories.go index bc6a5a598c..97ef16555c 100644 --- a/cliv2/internal/utils/directories.go +++ b/cliv2/internal/utils/directories.go @@ -1,40 +1,9 @@ package utils import ( - "fmt" - "os" "path" - - "github.com/pkg/errors" ) -const CACHEDIR_PERMISSION = 0755 - -// The directory structure used to cache things into -// - Base cache directory (user definable, default depends on OS, exmple: /Users/username/Library/Caches/snyk/) -// |- Version cache directory (example: /Users/username/Library/Caches/snyk/snyk-cli/1.1075.0/) -// |- Temp directory (example: /Users/username/Library/Caches/snyk/snyk-cli/1.1075.0/tmp/) - -func GetTemporaryDirectory(baseCacheDirectory string, versionNumber string) string { - pid := os.Getpid() - return path.Join(GetVersionCacheDirectory(baseCacheDirectory, versionNumber), "tmp", fmt.Sprintf("pid%d", pid)) -} - func GetVersionCacheDirectory(baseCacheDirectory string, versionNumber string) string { return path.Join(baseCacheDirectory, versionNumber) } - -func CreateAllDirectories(baseCacheDirectory string, versionNumber string) error { - directoryList := []string{ - GetTemporaryDirectory(baseCacheDirectory, versionNumber), - } - - for _, dir := range directoryList { - err := os.MkdirAll(dir, CACHEDIR_PERMISSION) - if err != nil { - return errors.Wrap(err, "failed to create all directories.") - } - } - - return nil -} diff --git a/cliv2/pkg/basic_workflows/globalresources.go b/cliv2/pkg/basic_workflows/globalresources.go index 0a5caf6fc7..118dc16c7a 100644 --- a/cliv2/pkg/basic_workflows/globalresources.go +++ b/cliv2/pkg/basic_workflows/globalresources.go @@ -1,12 +1,16 @@ package basic_workflows import ( + "errors" + "fmt" + "io/fs" "os" "sync" 
"github.com/rs/zerolog" "github.com/snyk/go-application-framework/pkg/configuration" "github.com/snyk/go-application-framework/pkg/workflow" + "github.com/spf13/pflag" "github.com/snyk/cli/cliv2/internal/cliv2" @@ -19,7 +23,14 @@ var caMutex sync.Mutex var WORKFLOWID_GLOBAL_CLEANUP workflow.Identifier = workflow.NewWorkflowIdentifier("internal.cleanup") +const ( + ConfigurationCleanupGlobalCertAuthority = "internal_cleanup_global_cert_auth_enabled" + ConfigurationCleanupGlobalTempDirectory = "internal_cleanup_global_temp_dir_enabled" +) + func initCleanup(engine workflow.Engine) error { + engine.GetConfiguration().AddDefaultValue(ConfigurationCleanupGlobalCertAuthority, configuration.StandardDefaultValueFunction(true)) + engine.GetConfiguration().AddDefaultValue(ConfigurationCleanupGlobalTempDirectory, configuration.StandardDefaultValueFunction(true)) entry, err := engine.Register(WORKFLOWID_GLOBAL_CLEANUP, workflow.ConfigurationOptionsFromFlagset(pflag.NewFlagSet("cleanup", pflag.ContinueOnError)), globalCleanupWorkflow) if err != nil { return err @@ -36,13 +47,19 @@ func globalCleanupWorkflow( logger := invocation.GetEnhancedLogger() config := invocation.GetConfiguration() - CleanupGlobalCertAuthority(logger) + CleanupGlobalCertAuthority(config, logger) CleanupGlobalTempDirectory(config, logger) return output, err } -func CleanupGlobalCertAuthority(debugLogger *zerolog.Logger) { +func CleanupGlobalCertAuthority(config configuration.Configuration, debugLogger *zerolog.Logger) { + enabled := config.GetBool(ConfigurationCleanupGlobalCertAuthority) + if !enabled { + debugLogger.Print("Cleanup of global certificate authority is disabled") + return + } + caMutex.Lock() defer caMutex.Unlock() if caSingleton != nil { @@ -62,12 +79,29 @@ func GetGlobalCertAuthority(config configuration.Configuration, debugLogger *zer caMutex.Lock() defer caMutex.Unlock() + createCA := false + if caSingleton == nil { + createCA = true + } else if _, existsError := 
os.Stat(caSingleton.CertFile); errors.Is(existsError, fs.ErrNotExist) { // certificate file does not exist + if len(caSingleton.CertPem) > 0 && len(caSingleton.CertFile) > 0 { // try to re-create file + debugLogger.Printf("Restoring temporary certificate file: %s", caSingleton.CertFile) + err := utils.WriteToFile(caSingleton.CertFile, caSingleton.CertPem) + if err != nil { + debugLogger.Printf("Failed to write cert to file: %s", caSingleton.CertFile) + return proxy.CaData{}, err + } + } else { // fail for this unexpected case + return proxy.CaData{}, fmt.Errorf("used Certificate Authority is not existing anymore!") + } + } + + if createCA { + debugLogger.Print("Creating new Certificate Authority") tmp, err := proxy.InitCA(config, cliv2.GetFullVersion(), debugLogger) if err != nil { return proxy.CaData{}, err } - caSingleton = tmp } @@ -75,7 +109,13 @@ func GetGlobalCertAuthority(config configuration.Configuration, debugLogger *zer } func CleanupGlobalTempDirectory(config configuration.Configuration, debugLogger *zerolog.Logger) { - tmpDirectory := utils.GetTemporaryDirectory(config.GetString(configuration.CACHE_PATH), cliv2.GetFullVersion()) + enabled := config.GetBool(ConfigurationCleanupGlobalTempDirectory) + if !enabled { + debugLogger.Print("Cleanup of global temporary directory is disabled") + return + } + + tmpDirectory := config.GetString(configuration.TEMP_DIR_PATH) err := os.RemoveAll(tmpDirectory) if err != nil { debugLogger.Print("Failed to delete temporary directory: ", tmpDirectory) diff --git a/cliv2/pkg/basic_workflows/globalresources_test.go b/cliv2/pkg/basic_workflows/globalresources_test.go index 2f91f59b49..4988b8cfcb 100644 --- a/cliv2/pkg/basic_workflows/globalresources_test.go +++ b/cliv2/pkg/basic_workflows/globalresources_test.go @@ -1,17 +1,19 @@ package basic_workflows import ( + "os" "sync" "testing" "time" + "github.com/rs/zerolog" "github.com/snyk/go-application-framework/pkg/configuration" 
"github.com/snyk/go-application-framework/pkg/workflow" "github.com/spf13/pflag" "github.com/stretchr/testify/assert" ) -func Test_(t *testing.T) { +func Test_ParallelGetGobalCertAuthority(t *testing.T) { var mu sync.Mutex caCertFile := "" @@ -72,3 +74,53 @@ func Test_(t *testing.T) { assert.NoFileExists(t, caCertFile, "Cert file") } + +func Test_RestoreCertAuthority(t *testing.T) { + config := configuration.NewInMemory() + // set as we don't call initCleanup() + config.Set(ConfigurationCleanupGlobalCertAuthority, true) + logger := zerolog.New(os.Stderr) + + ca1, err := GetGlobalCertAuthority(config, &logger) + + assert.NoError(t, err) + assert.FileExists(t, ca1.CertFile) + + t.Run("manual removal of file", func(t *testing.T) { + os.Remove(ca1.CertFile) + + ca2, err := GetGlobalCertAuthority(config, &logger) + assert.NoError(t, err) + assert.FileExists(t, ca2.CertFile) + assert.Equal(t, ca1.CertFile, ca2.CertFile) + }) + + t.Run("manual removal of file and deletion of cached values", func(t *testing.T) { + os.Remove(ca1.CertFile) + caSingleton.CertPem = "" + caSingleton.CertFile = "" + + ca2, err := GetGlobalCertAuthority(config, &logger) + assert.Error(t, err) + assert.NotEqual(t, ca1.CertFile, ca2.CertFile) + }) + + t.Run("use cleanup function", func(t *testing.T) { + CleanupGlobalCertAuthority(config, &logger) + + ca2, err := GetGlobalCertAuthority(config, &logger) + assert.NoError(t, err) + assert.FileExists(t, ca2.CertFile) + assert.NotEqual(t, ca1.CertFile, ca2.CertFile) + }) + + t.Run("skips cleanup function", func(t *testing.T) { + config.Set(ConfigurationCleanupGlobalCertAuthority, false) + CleanupGlobalCertAuthority(config, &logger) + + ca2, err := GetGlobalCertAuthority(config, &logger) + assert.NoError(t, err) + assert.FileExists(t, ca2.CertFile) + assert.NotEqual(t, ca1.CertFile, ca2.CertFile) + }) +} diff --git a/cliv2/pkg/basic_workflows/legacycli.go b/cliv2/pkg/basic_workflows/legacycli.go index 5b4f27a685..6eb3ac0136 100644 --- 
a/cliv2/pkg/basic_workflows/legacycli.go +++ b/cliv2/pkg/basic_workflows/legacycli.go @@ -8,7 +8,6 @@ import ( "github.com/pkg/errors" "github.com/rs/zerolog" - "github.com/snyk/go-application-framework/pkg/auth" "github.com/snyk/go-application-framework/pkg/configuration" "github.com/snyk/go-application-framework/pkg/logging" "github.com/snyk/go-application-framework/pkg/networking" @@ -75,8 +74,8 @@ func legacycliWorkflow( debugLogger := invocation.GetEnhancedLogger() // uses zerolog debugLoggerDefault := invocation.GetLogger() // uses log networkAccess := invocation.GetNetworkAccess() + ri := invocation.GetRuntimeInfo() - oauthIsAvailable := config.GetBool(configuration.FF_OAUTH_AUTH_FLOW_ENABLED) args := config.GetStringSlice(configuration.RAW_CMD_ARGS) useStdIo := config.GetBool(configuration.WORKFLOW_USE_STDIO) isDebug := config.GetBool(configuration.DEBUG) @@ -91,7 +90,7 @@ func legacycliWorkflow( // init cli object var cli *cliv2.CLI - cli, err = cliv2.NewCLIv2(config, debugLoggerDefault) + cli, err = cliv2.NewCLIv2(config, debugLoggerDefault, ri) if err != nil { return output, err } @@ -104,23 +103,14 @@ func legacycliWorkflow( cli.AppendEnvironmentVariables(env) } - if oauthIsAvailable { - // The Legacy CLI doesn't support oauth authentication. Oauth authentication is implemented in the Extensible CLI and is added - // to the legacy CLI by forwarding network traffic through the internal proxy of the Extensible CLI. - // The legacy CLI always expects some sort of token to be available, otherwise some functionality isn't available. This is why we inject - // a random token value to bypass these checks and replace the proper authentication headers in the internal proxy. - // Injecting the real token here and not in the proxy would create an issue when the token expires during CLI execution. 
- if oauth := config.GetString(auth.CONFIG_KEY_OAUTH_TOKEN); len(oauth) > 0 { - envMap := pkg_utils.ToKeyValueMap(os.Environ(), "=") - if _, ok := envMap[constants.SNYK_OAUTH_ACCESS_TOKEN_ENV]; !ok { - env := []string{constants.SNYK_OAUTH_ACCESS_TOKEN_ENV + "=randomtoken"} - cli.AppendEnvironmentVariables(env) - debugLogger.Print("Authentication: Oauth token handling delegated to Extensible CLI.") - } else { - debugLogger.Print("Authentication: Using oauth token from Environment Variable.") - } - } + // In general all authentication is handled through the Extensible CLI now. But there is some legacy logic + // that checks for an API token to be available. Until this logic is safely removed, we will be injecting a + // fake/random API token to bypass this logic. + apiToken := config.GetString(configuration.AUTHENTICATION_TOKEN) + if len(apiToken) == 0 { + apiToken = "random" } + cli.AppendEnvironmentVariables([]string{constants.SNYK_API_TOKEN_ENV + "=" + apiToken}) err = cli.Init() if err != nil { return output, err } diff --git a/dangerfile.js b/dangerfile.js index 2977903aad..cd298cb1be 100644 --- a/dangerfile.js +++ b/dangerfile.js @@ -4,7 +4,8 @@ const fs = require('fs'); const MAX_COMMIT_MESSAGE_LENGTH = 72; function checkCommitMessage(commitMessage, url) { - const firstLineRegex = /^Merge.*|(feat|fix|chore|test|docs|refactor|revert)(\([a-z0-9-_]+\))?:(.+)$/; + const firstLineRegex = + /^Merge.*|(feat|fix|chore|test|docs|refactor|revert)(\([a-z0-9-_]+\))?:(.+)$/; if (!firstLineRegex.test(commitMessage)) { fail( `"[${commitMessage}](${url})" is not using a valid commit message format.
For commit guidelines, see: [CONTRIBUTING](https://github.com/snyk/snyk/blob/main/CONTRIBUTING.md#creating-commits).`, @@ -33,6 +34,10 @@ if (danger.github && danger.github.pr) { const prTitle = danger.github.pr.title; checkCommitMessage(prTitle, danger.github.pr.html_url); + + warn( + `There are multiple commits on your branch, please squash them locally before merging!`, + ); } // Forgotten tests check diff --git a/help/cli-commands/auth.md b/help/cli-commands/auth.md index 3d77c88d40..63bbc98425 100644 --- a/help/cli-commands/auth.md +++ b/help/cli-commands/auth.md @@ -18,7 +18,7 @@ OAuth provides improved security by issuing shorter-lived expiring authorization Earlier versions of the Snyk CLI (< 1.1293) obtained a non-expiring API token through a legacy browser interaction. -This authentication method is deprecated but you may use it in the event of problems with OAuth by running `snyk auth --auth-type=token`. A future release of the CLI will remove this option. +The Snyk API token can still be used as a fallback option. You must explicitly add an option to enable it as follows: `snyk auth --auth-type=token`. ## Options @@ -26,19 +26,19 @@ This authentication method is deprecated but you may use it in the event of prob Specify the \ of authentication to use. Supported types are `oauth` (the default beginning with version 1.1293.0) AND `token`. -### `--client-secret=` - -### `--client-id=` +### `--client-secret=` and `--client-id=` You can set the client secret and the id can be set in order to use the [OAuth2 Client Credentials Grant](https://docs.snyk.io/enterprise-configuration/service-accounts/service-accounts-using-oauth-2.0#oauth-2.0-with-client-secret) Both values must be provided together. They are only valid together with `--auth-type=oauth;`otherwise they will be ignored. 
-## Value +For information about how to get the `` and the ``, see [Service accounts using OAuth 2.0](https://docs.snyk.io/enterprise-setup/service-accounts/service-accounts-using-oauth-2.0#oauth-2.0-with-client-secret) + +## Token value In some environments and configurations, you must use the ``; see [Authenticate the CLI with your account](https://docs.snyk.io/snyk-cli/authenticate-the-cli-with-your-account) -The value may be a user token or a service account; see [Service accounts](https://docs.snyk.io/enterprise-setup/service-accounts) +The value may be a user token or a service account token; see [Service accounts](https://docs.snyk.io/enterprise-setup/service-accounts) In a CI/CD environment, use the `SNYK_TOKEN` environment variable; see [Configure the Snyk CLI](https://docs.snyk.io/snyk-cli/configure-the-snyk-cli) diff --git a/help/cli-commands/code-test.md b/help/cli-commands/code-test.md index 2002878edc..480808d319 100644 --- a/help/cli-commands/code-test.md +++ b/help/cli-commands/code-test.md @@ -6,7 +6,7 @@ ## Description -The `snyk code test` command tests for any known security issues using Static Code Analysis. +The `snyk code test` command tests source code for any known security issues (Static Application Security Testing). ## Exit codes @@ -67,7 +67,7 @@ Save test output as a JSON data structure directly to the specified file, regard Use to display the human-readable test output using stdout and, at the same time, save the JSON data structure output to a file. -For SAST, if no issues are found, Snyk does not create a `json` file. In contrast, for open-source, Snyk creates a file whether or not issues are found. +For SAST, if no issues are found, Snyk does not create a `json` file. In contrast, for open-source, Snyk creates a file whether or not issues are found. 
Example: `$ snyk code test --json-file-output=vuln.json` diff --git a/help/cli-commands/code.md b/help/cli-commands/code.md index 05716848d1..2fb38a6dab 100644 --- a/help/cli-commands/code.md +++ b/help/cli-commands/code.md @@ -10,4 +10,4 @@ For more information, see [CLI for Snyk Code](https://docs.snyk.io/snyk-cli/scan The single`snyk code` command is identified here with the help options: -[`code test`](code-test.md); `code test --help`: tests for any known security issues using Static Code Analysis +[`code test`](code-test.md); `code test --help`: Test source code for any known security issues (Static Application Security Testing). diff --git a/help/cli-commands/config-environment.md b/help/cli-commands/config-environment.md index 7ff666e557..8a45d9ee64 100644 --- a/help/cli-commands/config-environment.md +++ b/help/cli-commands/config-environment.md @@ -2,6 +2,8 @@ **Note:** This command will be available as of CLI version 1.1293.0. +If you are not in the system default environment, SNYK-US-01, use the `snyk config environment` command to set your environment before you run `snyk auth`. + ## Usage `snyk config environment ` diff --git a/help/cli-commands/config.md b/help/cli-commands/config.md index b46c698e60..e9976f803a 100644 --- a/help/cli-commands/config.md +++ b/help/cli-commands/config.md @@ -34,6 +34,10 @@ Remove a config value. Remove all config values. +### `environment` + +Change the endpoint to use. 
Run `config environment --help` or see the [Config environment help page](https://docs.snyk.io/snyk-cli/commands/config-environment) + ## Supported `` values ### `api` diff --git a/help/cli-commands/iac-test.md b/help/cli-commands/iac-test.md index ba3a5916cd..14ee7913cc 100644 --- a/help/cli-commands/iac-test.md +++ b/help/cli-commands/iac-test.md @@ -89,7 +89,7 @@ Save test output in SARIF format directly to the \ file, regar This is especially useful if you want to display the human-readable test output using stdout and at the same time save the SARIF format output to a file. -Note: If you use an option that sets project attributes and your role lacks permission to edit project attributes the `iac test` command fails. For instructions on how to proceed see [Editing project attributes from the Snyk CLI](https://docs.snyk.io/features/user-and-group-management/managing-users-and-permissions/managing-permissions#editing-project-attributes-from-the-snyk-cli) +Note: If you use an option that sets project attributes and your role lacks permission to edit project attributes the `iac test` command fails. For instructions on how to proceed see [Permissions required to edit Project attributes from the Snyk CLI](https://docs.snyk.io/snyk-admin/user-roles/user-role-management#permissions-required-to-edit-project-attributes-from-the-snyk-cli) ### `--project-business-criticality=[,]...>` diff --git a/help/cli-commands/monitor.md b/help/cli-commands/monitor.md index d09f11b131..6f63dfa54f 100644 --- a/help/cli-commands/monitor.md +++ b/help/cli-commands/monitor.md @@ -333,6 +333,24 @@ Control monitoring out-of-sync lockfiles. Default: true +## Options for pnpm projects + +**Snyk CLI pnpm support is in Early Access**. To enable it, in your Snyk account navigate to Settings, select Snyk Preview, and install CLI v1.1293.0 or above. + +**Note**: You can use the following options with pnpm projects: + +`--dev`. 
See the [`--dev` option help](https://docs.snyk.io/snyk-cli/commands/monitor#dev) + +`--all-projects` to scan and detect pnpm projects and all other projects in the directory. See the [`--all-projects` option help](https://docs.snyk.io/snyk-cli/commands/monitor#all-projects) + +`--prune-repeated-subdependencies, -p`. See the [--prune-repeated subdependencies option help](https://docs.snyk.io/snyk-cli/commands/monitor#prune-repeated-subdependencies-p) + +### `--strict-out-of-sync=true|false` + +Control monitoring out-of-sync lockfiles. + +Default: true + ## Options for Yarn projects **Note**: You can use the following options with Yarn projects: @@ -349,7 +367,7 @@ Default: true ### `--yarn-workspaces` -Detect and scan Yarn Workspaces only when a lockfile is in the root. +Detect and scan only Yarn Workspaces when a lockfile is in the root. You can specify how many sub-directories to search using `--detection-depth`. @@ -387,9 +405,15 @@ For a Python project, specify a particular file to monitor. Default: Snyk scans the requirements.txt file at the top level of the project. -Snyk can recognize any manifest files specified with this option based on `--file=req*.txt`. The `*` is a wildcard and `req` can appear anywhere in the file name. +**Important:** When specifying a value for the `--file` parameter that is not the default file, you must also include the `--package-manager=pip` option. The test will fail without this parameter. + +Always specify this parameter with the value `pip` when using a custom `--file` value. For example: + +```bash +snyk test --file=requirements-dev.txt --package-manager=pip +``` -For example, Snyk recognizes your manifest file when you have renamed it to `requirements-dev.txt`. +This allows Snyk to correctly recognize and scan your specified manifest file, such as when you have renamed it to `requirements-dev.txt`. 
### `--package-manager=pip` diff --git a/help/cli-commands/test.md b/help/cli-commands/test.md index bcd4943213..b294c69be6 100644 --- a/help/cli-commands/test.md +++ b/help/cli-commands/test.md @@ -122,7 +122,7 @@ For more information see the article [How to select the Organization to use in t Specify a package file. -When testing locally or monitoring a project, you can specify the file that Snyk should inspect for package information. When the file is not specified, Snyk tries to detect the appropriate file for your project. +When you are testing locally or monitoring a project, you can specify the file that Snyk should inspect for package information. When the file is not specified, Snyk tries to detect the appropriate file for your project. See also the section on [Options for Python projects](https://docs.snyk.io/snyk-cli/commands/test#options-for-python-projects) @@ -368,6 +368,28 @@ If there are out-of-sync lockfiles in the project, the `test` command fails when Default: true +## Options for pnpm projects + +**Snyk CLI pnpm support is in Early Access**. To enable it, in your Snyk account navigate to Settings, select Snyk Preview, and install CLI v1.1293.0 or above. + +**Note**: You can use the following options with pnpm projects: + +`--dev`. See the [`--dev` option help](https://docs.snyk.io/snyk-cli/commands/test#dev) + +`--all-projects` to scan and detect pnpm projects and all other projects in the directory. See the [`--all-projects` option help](https://docs.snyk.io/snyk-cli/commands/test#all-projects) + +`--fail-on`. See the [--fail-on option help](https://docs.snyk.io/snyk-cli/commands/test#fail-on-less-than-all-or-upgradable-or-patchable-greater-than) + +`--prune-repeated-subdependencies, -p`. See the [`--prune-repeated subdependencies` option help](https://docs.snyk.io/snyk-cli/commands/test#prune-repeated-subdependencies-p) + +### `--strict-out-of-sync=true|false` + +Prevent testing out-of-sync lockfiles. 
+ +If there are out-of-sync lockfiles in the project, the `test` command fails when `--strict-out-of-sync=true`. + +Default: true + ## Options for Yarn projects **Note**: You can use the following options with Yarn projects: @@ -388,7 +410,7 @@ Default: true ### `--yarn-workspaces` -Detect and scan Yarn Workspaces only when a lockfile is in the root. +Detect and scan only Yarn Workspaces when a lockfile is in the root. You can specify how many sub-directories to search using `--detection-depth.` @@ -426,9 +448,15 @@ For a Python project, specify a particular file to test. Default: Snyk scans the requirements.txt file at the top level of the project. -Snyk can recognize any manifest files specified with this option based on `--file=req*.txt`. The `*` is a wildcard and `req` can appear anywhere in the file name. +**Important:** When specifying a value for the `--file` parameter that is not the default file, you must also include the `--package-manager=pip` option. The test will fail without this parameter. + +Always specify this parameter with the value `pip` when using a custom `--file` value. For example: + +```bash +snyk test --file=requirements-dev.txt --package-manager=pip +``` -For example, Snyk recognizes your manifest file when you have renamed it to `requirements-dev.txt`. +This allows Snyk to correctly recognize and scan your specified manifest file, such as when you have renamed it to `requirements-dev.txt`. 
### `--package-manager=pip` diff --git a/package-lock.json b/package-lock.json index a3ab750552..2fb7e99826 100644 --- a/package-lock.json +++ b/package-lock.json @@ -57,7 +57,7 @@ "lodash.upperfirst": "^4.3.1", "lodash.values": "^4.3.0", "marked": "^4.0.1", - "micromatch": "4.0.7", + "micromatch": "4.0.8", "needle": "^3.3.0", "open": "^7.0.3", "ora": "5.4.0", @@ -72,14 +72,14 @@ "snyk-go-plugin": "1.23.0", "snyk-gradle-plugin": "4.1.0", "snyk-module": "3.1.0", - "snyk-mvn-plugin": "3.5.0", - "snyk-nodejs-lockfile-parser": "1.58.7", - "snyk-nodejs-plugin": "1.3.2", - "snyk-nuget-plugin": "2.7.6", + "snyk-mvn-plugin": "3.6.0", + "snyk-nodejs-lockfile-parser": "1.58.10", + "snyk-nodejs-plugin": "1.3.3", + "snyk-nuget-plugin": "2.7.8", "snyk-php-plugin": "1.9.2", - "snyk-policy": "^1.25.0", + "snyk-policy": "^4.0.0", "snyk-python-plugin": "2.2.1", - "snyk-resolve-deps": "4.7.3", + "snyk-resolve-deps": "4.8.0", "snyk-sbt-plugin": "2.18.1", "snyk-swiftpm-plugin": "1.4.1", "strip-ansi": "^6.0.1", @@ -125,7 +125,7 @@ "npm-run-all": "^4.1.5", "patch-package": "^6.5.0", "pkg": "5.8.0", - "prettier": "^1.18.2", + "prettier": "^3.3.3", "proxyquire": "^1.7.4", "sinon": "^4.0.0", "tap": "^19.0.2", @@ -15452,11 +15452,6 @@ "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz", "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=" }, - "node_modules/lodash.assignin": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.assignin/-/lodash.assignin-4.2.0.tgz", - "integrity": "sha1-uo31+4QesKPoBEIysOJjqNxqKKI=" - }, "node_modules/lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", @@ -15650,7 +15645,8 @@ "node_modules/lodash.set": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", - "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=" + "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=", + "dev": true }, 
"node_modules/lodash.size": { "version": "4.2.0", @@ -16067,9 +16063,9 @@ } }, "node_modules/micromatch": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dependencies": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -18272,15 +18268,18 @@ } }, "node_modules/prettier": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", - "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", "dev": true, "bin": { - "prettier": "bin-prettier.js" + "prettier": "bin/prettier.cjs" }, "engines": { - "node": ">=4" + "node": ">=14" + }, + "funding": { + "url": "https://github.com/prettier/prettier?sponsor=1" } }, "node_modules/pretty-bytes": { @@ -20767,9 +20766,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/snyk-mvn-plugin": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/snyk-mvn-plugin/-/snyk-mvn-plugin-3.5.0.tgz", - "integrity": "sha512-mdCLfPsFpPLaCxw7wp2xGOF4L+o7SxHOsmm3BQ9ktdownex+XS3GhGx8C/4o4vMQ2vOr46dGTKuq1QGO+HQgUg==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/snyk-mvn-plugin/-/snyk-mvn-plugin-3.6.0.tgz", + "integrity": "sha512-CATLJXmgQhyIntcvQRF59hlX6DA1s2w+HpfMOrz++HvmaWioe/H0A4d1jTg0rzdh3AonhFq9nvj8n9LdDDZ7JQ==", "dependencies": { "@snyk/cli-interface": "2.11.3", 
"@snyk/dep-graph": "^1.23.1", @@ -20872,9 +20871,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/snyk-nodejs-lockfile-parser": { - "version": "1.58.7", - "resolved": "https://registry.npmjs.org/snyk-nodejs-lockfile-parser/-/snyk-nodejs-lockfile-parser-1.58.7.tgz", - "integrity": "sha512-pZaAb01yglc+O8YzpoiHOqNXZvwYhRNiwwXiOJ8i1oLUN2PT4+LQLM4fSBsH8w14Vp3EVE/BbC933yme2Vpgyg==", + "version": "1.58.10", + "resolved": "https://registry.npmjs.org/snyk-nodejs-lockfile-parser/-/snyk-nodejs-lockfile-parser-1.58.10.tgz", + "integrity": "sha512-lTNz75FOsZyCWfpDshHNvfqxMTj4ikTEVBaX8WOcI+wvYrMswMN2c2wPHwFasdumHJh+kbqLf5bJKBMioSwcFw==", "dependencies": { "@snyk/dep-graph": "^2.3.0", "@snyk/error-catalog-nodejs-public": "^5.16.0", @@ -20975,9 +20974,9 @@ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" }, "node_modules/snyk-nodejs-plugin": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/snyk-nodejs-plugin/-/snyk-nodejs-plugin-1.3.2.tgz", - "integrity": "sha512-xZhtlQBr97K8osQzNwxTckh+d9mFV+mp0gKPCQCjSFYUQeppRYAEKm3Uc9V2k9CilXWidsAqrgBzPHRTBUK3ZQ==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/snyk-nodejs-plugin/-/snyk-nodejs-plugin-1.3.3.tgz", + "integrity": "sha512-QWvN9mZzbYJAYP1oog8HITfWMyGsR7jey2BphnKvi+mgfq9VgopixryvhXv63m8jMCiTQZEUSTGd2qbR2uJVnA==", "dependencies": { "@snyk/cli-interface": "^2.13.0", "@snyk/dep-graph": "^2.7.4", @@ -20987,7 +20986,7 @@ "lodash.isempty": "^4.4.0", "lodash.sortby": "^4.7.0", "micromatch": "4.0.7", - "snyk-nodejs-lockfile-parser": "1.58.7", + "snyk-nodejs-lockfile-parser": "1.58.10", "snyk-resolve-deps": "4.8.0" }, "engines": { @@ -21041,6 +21040,18 @@ "yallist": "^2.1.2" } }, + "node_modules/snyk-nodejs-plugin/node_modules/micromatch": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", + "integrity": 
"sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, "node_modules/snyk-nodejs-plugin/node_modules/semver": { "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", @@ -21081,9 +21092,9 @@ "integrity": "sha512-ncTzHV7NvsQZkYe1DW7cbDLm0YpzHmZF5r/iyP3ZnQtMiJ+pjzisCiMNI+Sj+xQF5pXhSHxSB3uDbsBTzY/c2A==" }, "node_modules/snyk-nuget-plugin": { - "version": "2.7.6", - "resolved": "https://registry.npmjs.org/snyk-nuget-plugin/-/snyk-nuget-plugin-2.7.6.tgz", - "integrity": "sha512-be7YNaYOqNOO33HO2M3lIPnnyOHiZcmap/qgI36auQzhcYCa3jY+EP1vYs3w0Af62O5IPxgF/r4itLDsT43R+A==", + "version": "2.7.8", + "resolved": "https://registry.npmjs.org/snyk-nuget-plugin/-/snyk-nuget-plugin-2.7.8.tgz", + "integrity": "sha512-FufhAyRXKky50fMpLwQ9n6MLchv0S+q4lTyvNsSd+yJWnDHLjI/AMnN8hM6cXwsJtl8DwMDWvXu0rUGNES0Brg==", "dependencies": { "@snyk/cli-interface": "^2.14.0", "@snyk/dep-graph": "^2.8.1", @@ -21167,15 +21178,14 @@ "integrity": "sha512-77EbyPPpMz+FRFRuAFlWMtmgUWGe9UOG2Z25NqCwiIjRhOf5iKGuzSe5P2w1laq+FkRy4p+PCuVkJSGkzTEKVw==" }, "node_modules/snyk-policy": { - "version": "1.25.0", - "resolved": "https://registry.npmjs.org/snyk-policy/-/snyk-policy-1.25.0.tgz", - "integrity": "sha512-naAoqjlspwioBDlrSk5/pPGlSX2dMG42XDhoUdU/41NPS57jsifpENgiT83HEJDbTRGHOPTmQ1B4lvRupb70hQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/snyk-policy/-/snyk-policy-4.0.0.tgz", + "integrity": "sha512-xkXsDhnZS2zcB/BAKVZKR09ZTkJ4M/5eVyuVrV7+BFMy7bSv2EZPDulGGsrkUhXbQwkm7eW+FtccZABRRdct2w==", "dependencies": { "debug": "^4.1.1", "email-validator": "^2.0.4", "js-yaml": "^3.13.1", "lodash.clonedeep": "^4.5.0", - "promise-fs": "^2.1.1", "semver": "^7.3.4", "snyk-module": "^3.0.0", "snyk-resolve": "^1.1.0", @@ -21308,27 +21318,53 @@ } }, "node_modules/snyk-resolve-deps": { - "version": "4.7.3", - "resolved": 
"https://registry.npmjs.org/snyk-resolve-deps/-/snyk-resolve-deps-4.7.3.tgz", - "integrity": "sha512-UzPCDS4xzcSqahmTpC1o75aIX0t/1voj34X3D3yOn3FvY9doiXC02B4IILpPh75ZFtYgdqkOr7TS2lm/ltgEcg==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/snyk-resolve-deps/-/snyk-resolve-deps-4.8.0.tgz", + "integrity": "sha512-/pXaStapn8ldr68e1Bs2gmxoQpiB3fnjfZSfzY82bxedmSKzQgTJ5vhf1P9kALj3IBEb1wYaQ/MtNH5E9DK0/g==", + "license": "Apache-2.0", "dependencies": { "ansicolors": "^0.3.2", - "debug": "^4.1.1", - "lodash.assign": "^4.2.0", - "lodash.assignin": "^4.2.0", - "lodash.clone": "^4.5.0", - "lodash.flatten": "^4.4.0", - "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", - "lru-cache": "^4.0.0", - "semver": "^5.5.1", - "snyk-module": "^3.1.0", - "snyk-resolve": "^1.0.0", + "debug": "^4.3.4", + "lodash": "^4.17.21", + "lru-cache": "^4.1.5", + "semver": "^5.7.2", + "snyk-module": "^3.2.0", + "snyk-resolve": "^1.1.0", "snyk-tree": "^1.0.0", "snyk-try-require": "^2.0.2", "then-fs": "^2.0.0" } }, + "node_modules/snyk-resolve-deps/node_modules/hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "license": "ISC", + "dependencies": { + "lru-cache": "^6.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/snyk-resolve-deps/node_modules/hosted-git-info/node_modules/lru-cache": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "license": "ISC", + "dependencies": { + "yallist": "^4.0.0" + }, + "engines": { + "node": ">=10" + } + }, + "node_modules/snyk-resolve-deps/node_modules/hosted-git-info/node_modules/yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", 
+ "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==", + "license": "ISC" + }, "node_modules/snyk-resolve-deps/node_modules/lru-cache": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", @@ -21346,6 +21382,16 @@ "semver": "bin/semver" } }, + "node_modules/snyk-resolve-deps/node_modules/snyk-module": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/snyk-module/-/snyk-module-3.2.0.tgz", + "integrity": "sha512-6MLJyi4OMOZtCWTzGgRMEEw9qQ1fAwKoj5XYXfKOjIsohi3ubKsVfvSoScj0IovtiKowm2iCZ+VIRPJab6nCxA==", + "license": "Apache-2.0", + "dependencies": { + "debug": "^4.1.1", + "hosted-git-info": "^4.0.2" + } + }, "node_modules/snyk-resolve-deps/node_modules/yallist": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", @@ -36004,11 +36050,6 @@ "resolved": "https://registry.npmjs.org/lodash.assign/-/lodash.assign-4.2.0.tgz", "integrity": "sha1-DZnzzNem0mHRm9rrkkUAXShYCOc=" }, - "lodash.assignin": { - "version": "4.2.0", - "resolved": "https://registry.npmjs.org/lodash.assignin/-/lodash.assignin-4.2.0.tgz", - "integrity": "sha1-uo31+4QesKPoBEIysOJjqNxqKKI=" - }, "lodash.camelcase": { "version": "4.3.0", "resolved": "https://registry.npmjs.org/lodash.camelcase/-/lodash.camelcase-4.3.0.tgz", @@ -36202,7 +36243,8 @@ "lodash.set": { "version": "4.3.2", "resolved": "https://registry.npmjs.org/lodash.set/-/lodash.set-4.3.2.tgz", - "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=" + "integrity": "sha1-2HV7HagH3eJIFrDWqEvqGnYjCyM=", + "dev": true }, "lodash.size": { "version": "4.2.0", @@ -36516,9 +36558,9 @@ "dev": true }, "micromatch": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "version": "4.0.8", + "resolved": 
"https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "requires": { "braces": "^3.0.3", "picomatch": "^2.3.1" @@ -38152,9 +38194,9 @@ "dev": true }, "prettier": { - "version": "1.19.1", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", - "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", + "version": "3.3.3", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-3.3.3.tgz", + "integrity": "sha512-i2tDNA0O5IrMO757lfrdQZCc2jPNDVntV0m/+4whiDfWaTKfMNgR7Qz0NAeGz/nRqF4m5/6CLzbP4/liHt12Ew==", "dev": true }, "pretty-bytes": { @@ -40021,9 +40063,9 @@ } }, "snyk-mvn-plugin": { - "version": "3.5.0", - "resolved": "https://registry.npmjs.org/snyk-mvn-plugin/-/snyk-mvn-plugin-3.5.0.tgz", - "integrity": "sha512-mdCLfPsFpPLaCxw7wp2xGOF4L+o7SxHOsmm3BQ9ktdownex+XS3GhGx8C/4o4vMQ2vOr46dGTKuq1QGO+HQgUg==", + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/snyk-mvn-plugin/-/snyk-mvn-plugin-3.6.0.tgz", + "integrity": "sha512-CATLJXmgQhyIntcvQRF59hlX6DA1s2w+HpfMOrz++HvmaWioe/H0A4d1jTg0rzdh3AonhFq9nvj8n9LdDDZ7JQ==", "requires": { "@snyk/cli-interface": "2.11.3", "@snyk/dep-graph": "^1.23.1", @@ -40109,9 +40151,9 @@ } }, "snyk-nodejs-lockfile-parser": { - "version": "1.58.7", - "resolved": "https://registry.npmjs.org/snyk-nodejs-lockfile-parser/-/snyk-nodejs-lockfile-parser-1.58.7.tgz", - "integrity": "sha512-pZaAb01yglc+O8YzpoiHOqNXZvwYhRNiwwXiOJ8i1oLUN2PT4+LQLM4fSBsH8w14Vp3EVE/BbC933yme2Vpgyg==", + "version": "1.58.10", + "resolved": "https://registry.npmjs.org/snyk-nodejs-lockfile-parser/-/snyk-nodejs-lockfile-parser-1.58.10.tgz", + "integrity": "sha512-lTNz75FOsZyCWfpDshHNvfqxMTj4ikTEVBaX8WOcI+wvYrMswMN2c2wPHwFasdumHJh+kbqLf5bJKBMioSwcFw==", "requires": { "@snyk/dep-graph": "^2.3.0", "@snyk/error-catalog-nodejs-public": "^5.16.0", @@ -40191,9 +40233,9 @@ } 
}, "snyk-nodejs-plugin": { - "version": "1.3.2", - "resolved": "https://registry.npmjs.org/snyk-nodejs-plugin/-/snyk-nodejs-plugin-1.3.2.tgz", - "integrity": "sha512-xZhtlQBr97K8osQzNwxTckh+d9mFV+mp0gKPCQCjSFYUQeppRYAEKm3Uc9V2k9CilXWidsAqrgBzPHRTBUK3ZQ==", + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/snyk-nodejs-plugin/-/snyk-nodejs-plugin-1.3.3.tgz", + "integrity": "sha512-QWvN9mZzbYJAYP1oog8HITfWMyGsR7jey2BphnKvi+mgfq9VgopixryvhXv63m8jMCiTQZEUSTGd2qbR2uJVnA==", "requires": { "@snyk/cli-interface": "^2.13.0", "@snyk/dep-graph": "^2.7.4", @@ -40203,7 +40245,7 @@ "lodash.isempty": "^4.4.0", "lodash.sortby": "^4.7.0", "micromatch": "4.0.7", - "snyk-nodejs-lockfile-parser": "1.58.7", + "snyk-nodejs-lockfile-parser": "1.58.10", "snyk-resolve-deps": "4.8.0" }, "dependencies": { @@ -40247,6 +40289,15 @@ "yallist": "^2.1.2" } }, + "micromatch": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", + "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "requires": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + } + }, "semver": { "version": "5.7.2", "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", @@ -40286,9 +40337,9 @@ } }, "snyk-nuget-plugin": { - "version": "2.7.6", - "resolved": "https://registry.npmjs.org/snyk-nuget-plugin/-/snyk-nuget-plugin-2.7.6.tgz", - "integrity": "sha512-be7YNaYOqNOO33HO2M3lIPnnyOHiZcmap/qgI36auQzhcYCa3jY+EP1vYs3w0Af62O5IPxgF/r4itLDsT43R+A==", + "version": "2.7.8", + "resolved": "https://registry.npmjs.org/snyk-nuget-plugin/-/snyk-nuget-plugin-2.7.8.tgz", + "integrity": "sha512-FufhAyRXKky50fMpLwQ9n6MLchv0S+q4lTyvNsSd+yJWnDHLjI/AMnN8hM6cXwsJtl8DwMDWvXu0rUGNES0Brg==", "requires": { "@snyk/cli-interface": "^2.14.0", "@snyk/dep-graph": "^2.8.1", @@ -40363,15 +40414,14 @@ } }, "snyk-policy": { - "version": "1.25.0", - "resolved": "https://registry.npmjs.org/snyk-policy/-/snyk-policy-1.25.0.tgz", - 
"integrity": "sha512-naAoqjlspwioBDlrSk5/pPGlSX2dMG42XDhoUdU/41NPS57jsifpENgiT83HEJDbTRGHOPTmQ1B4lvRupb70hQ==", + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/snyk-policy/-/snyk-policy-4.0.0.tgz", + "integrity": "sha512-xkXsDhnZS2zcB/BAKVZKR09ZTkJ4M/5eVyuVrV7+BFMy7bSv2EZPDulGGsrkUhXbQwkm7eW+FtccZABRRdct2w==", "requires": { "debug": "^4.1.1", "email-validator": "^2.0.4", "js-yaml": "^3.13.1", "lodash.clonedeep": "^4.5.0", - "promise-fs": "^2.1.1", "semver": "^7.3.4", "snyk-module": "^3.0.0", "snyk-resolve": "^1.1.0", @@ -40477,27 +40527,45 @@ } }, "snyk-resolve-deps": { - "version": "4.7.3", - "resolved": "https://registry.npmjs.org/snyk-resolve-deps/-/snyk-resolve-deps-4.7.3.tgz", - "integrity": "sha512-UzPCDS4xzcSqahmTpC1o75aIX0t/1voj34X3D3yOn3FvY9doiXC02B4IILpPh75ZFtYgdqkOr7TS2lm/ltgEcg==", + "version": "4.8.0", + "resolved": "https://registry.npmjs.org/snyk-resolve-deps/-/snyk-resolve-deps-4.8.0.tgz", + "integrity": "sha512-/pXaStapn8ldr68e1Bs2gmxoQpiB3fnjfZSfzY82bxedmSKzQgTJ5vhf1P9kALj3IBEb1wYaQ/MtNH5E9DK0/g==", "requires": { "ansicolors": "^0.3.2", - "debug": "^4.1.1", - "lodash.assign": "^4.2.0", - "lodash.assignin": "^4.2.0", - "lodash.clone": "^4.5.0", - "lodash.flatten": "^4.4.0", - "lodash.get": "^4.4.2", - "lodash.set": "^4.3.2", - "lru-cache": "^4.0.0", - "semver": "^5.5.1", - "snyk-module": "^3.1.0", - "snyk-resolve": "^1.0.0", + "debug": "^4.3.4", + "lodash": "^4.17.21", + "lru-cache": "^4.1.5", + "semver": "^5.7.2", + "snyk-module": "^3.2.0", + "snyk-resolve": "^1.1.0", "snyk-tree": "^1.0.0", "snyk-try-require": "^2.0.2", "then-fs": "^2.0.0" }, "dependencies": { + "hosted-git-info": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/hosted-git-info/-/hosted-git-info-4.1.0.tgz", + "integrity": "sha512-kyCuEOWjJqZuDbRHzL8V93NzQhwIB71oFWSyzVo+KPZI+pnQPPxucdkrOZvkLRnrf5URsQM+IJ09Dw29cRALIA==", + "requires": { + "lru-cache": "^6.0.0" + }, + "dependencies": { + "lru-cache": { + "version": "6.0.0", + "resolved": 
"https://registry.npmjs.org/lru-cache/-/lru-cache-6.0.0.tgz", + "integrity": "sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==", + "requires": { + "yallist": "^4.0.0" + } + }, + "yallist": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-4.0.0.tgz", + "integrity": "sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==" + } + } + }, "lru-cache": { "version": "4.1.5", "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-4.1.5.tgz", @@ -40512,6 +40580,15 @@ "resolved": "https://registry.npmjs.org/semver/-/semver-5.7.2.tgz", "integrity": "sha512-cBznnQ9KjJqU67B52RMC65CMarK2600WFnbkcaiwWq3xy/5haFJlshgnpjovMVJ+Hff49d8GEn0b87C5pDQ10g==" }, + "snyk-module": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/snyk-module/-/snyk-module-3.2.0.tgz", + "integrity": "sha512-6MLJyi4OMOZtCWTzGgRMEEw9qQ1fAwKoj5XYXfKOjIsohi3ubKsVfvSoScj0IovtiKowm2iCZ+VIRPJab6nCxA==", + "requires": { + "debug": "^4.1.1", + "hosted-git-info": "^4.0.2" + } + }, "yallist": { "version": "2.1.2", "resolved": "https://registry.npmjs.org/yallist/-/yallist-2.1.2.tgz", diff --git a/package.json b/package.json index 70f1c0b84d..3e53e3c127 100644 --- a/package.json +++ b/package.json @@ -105,7 +105,7 @@ "lodash.upperfirst": "^4.3.1", "lodash.values": "^4.3.0", "marked": "^4.0.1", - "micromatch": "4.0.7", + "micromatch": "4.0.8", "needle": "^3.3.0", "open": "^7.0.3", "ora": "5.4.0", @@ -120,14 +120,14 @@ "snyk-go-plugin": "1.23.0", "snyk-gradle-plugin": "4.1.0", "snyk-module": "3.1.0", - "snyk-mvn-plugin": "3.5.0", - "snyk-nodejs-lockfile-parser": "1.58.7", - "snyk-nodejs-plugin": "1.3.2", - "snyk-nuget-plugin": "2.7.6", + "snyk-mvn-plugin": "3.6.0", + "snyk-nodejs-lockfile-parser": "1.58.10", + "snyk-nodejs-plugin": "1.3.3", + "snyk-nuget-plugin": "2.7.8", "snyk-php-plugin": "1.9.2", - "snyk-policy": "^1.25.0", + "snyk-policy": "^4.0.0", "snyk-python-plugin": 
"2.2.1", - "snyk-resolve-deps": "4.7.3", + "snyk-resolve-deps": "4.8.0", "snyk-sbt-plugin": "2.18.1", "snyk-swiftpm-plugin": "1.4.1", "strip-ansi": "^6.0.1", @@ -170,7 +170,7 @@ "npm-run-all": "^4.1.5", "patch-package": "^6.5.0", "pkg": "5.8.0", - "prettier": "^1.18.2", + "prettier": "^3.3.3", "proxyquire": "^1.7.4", "sinon": "^4.0.0", "tap": "^19.0.2", @@ -202,5 +202,8 @@ "publishConfig": { "registry": "https://registry.npmjs.org/", "access": "public" + }, + "jest-junit": { + "reportTestSuiteErrors": "true" } } diff --git a/packages/iac-cli-alert/src/index.ts b/packages/iac-cli-alert/src/index.ts index 06fb89c2d1..761c738633 100644 --- a/packages/iac-cli-alert/src/index.ts +++ b/packages/iac-cli-alert/src/index.ts @@ -43,8 +43,7 @@ async function discoverConsecutiveFailures( async function sendSlackAlert() { console.log('IaC smoke tests failed. Sending Slack alert...'); const args: IncomingWebhookDefaultArguments = { - text: - 'Infrastructure as Code Smoke Tests jobs failed. \n Core functionality in the IaC+ CLI flows may not be working as expected. \n \n ', + text: 'Infrastructure as Code Smoke Tests jobs failed. \n Core functionality in the IaC+ CLI flows may not be working as expected. 
\n \n ', }; await slackWebhook.send(args); console.log('Slack alert sent.'); diff --git a/packages/snyk-fix/src/index.ts b/packages/snyk-fix/src/index.ts index 9f2eed3b1f..11815fdc4c 100644 --- a/packages/snyk-fix/src/index.ts +++ b/packages/snyk-fix/src/index.ts @@ -42,10 +42,8 @@ export async function fix( const spinner = ora({ isSilent: options.quiet, stream: process.stdout }); let resultsByPlugin: FixHandlerResultByPlugin = {}; - const { - vulnerable, - notVulnerable: nothingToFix, - } = await partitionByVulnerable(entities); + const { vulnerable, notVulnerable: nothingToFix } = + await partitionByVulnerable(entities); const entitiesPerType = groupEntitiesPerScanType(vulnerable); const exceptions: ErrorsByEcoSystem = {}; await pMap( @@ -94,9 +92,7 @@ export async function fix( }; } -export function groupEntitiesPerScanType( - entities: EntityToFix[], -): { +export function groupEntitiesPerScanType(entities: EntityToFix[]): { [type: string]: EntityToFix[]; } { const entitiesPerType: { diff --git a/packages/snyk-fix/src/lib/issues/fixable-issues.ts b/packages/snyk-fix/src/lib/issues/fixable-issues.ts index 942bf9100c..0557f84a4f 100644 --- a/packages/snyk-fix/src/lib/issues/fixable-issues.ts +++ b/packages/snyk-fix/src/lib/issues/fixable-issues.ts @@ -1,8 +1,6 @@ import { DependencyPins, DependencyUpdates, TestResult } from '../../types'; -export function hasFixableIssues( - results: TestResult[], -): { +export function hasFixableIssues(results: TestResult[]): { hasFixes: boolean; count: number; } { diff --git a/packages/snyk-fix/src/lib/issues/issues-by-severity.ts b/packages/snyk-fix/src/lib/issues/issues-by-severity.ts index 01c6eba474..904b0c4c8a 100644 --- a/packages/snyk-fix/src/lib/issues/issues-by-severity.ts +++ b/packages/snyk-fix/src/lib/issues/issues-by-severity.ts @@ -1,8 +1,11 @@ import { IssuesData } from '../../types'; -export function getIssueCountBySeverity( - issueData: IssuesData[], -): { low: string[]; medium: string[]; high: string[]; 
critical: string[] } { +export function getIssueCountBySeverity(issueData: IssuesData[]): { + low: string[]; + medium: string[]; + high: string[]; + critical: string[]; +} { const total = { low: [], medium: [], diff --git a/packages/snyk-fix/src/lib/output-formatters/show-results-summary.ts b/packages/snyk-fix/src/lib/output-formatters/show-results-summary.ts index 7ad83b0eaa..72728d00cc 100644 --- a/packages/snyk-fix/src/lib/output-formatters/show-results-summary.ts +++ b/packages/snyk-fix/src/lib/output-formatters/show-results-summary.ts @@ -25,22 +25,12 @@ export async function showResultsSummary( options: FixOptions, total: number, ): Promise { - const successfulFixesSummary = generateSuccessfulFixesSummary( - resultsByPlugin, - ); - const { - summary: unresolvedSummary, - count: unresolvedCount, - } = generateUnresolvedSummary(resultsByPlugin, exceptions); - const { - summary: overallSummary, - count: changedCount, - } = generateOverallSummary( - resultsByPlugin, - exceptions, - nothingToFix, - options, - ); + const successfulFixesSummary = + generateSuccessfulFixesSummary(resultsByPlugin); + const { summary: unresolvedSummary, count: unresolvedCount } = + generateUnresolvedSummary(resultsByPlugin, exceptions); + const { summary: overallSummary, count: changedCount } = + generateOverallSummary(resultsByPlugin, exceptions, nothingToFix, options); const getHelpText = `${reTryMessage}. 
${contactSupportMessage}`; diff --git a/packages/snyk-fix/src/partition-by-vulnerable.ts b/packages/snyk-fix/src/partition-by-vulnerable.ts index 6dd7d75c5d..600a8227ed 100644 --- a/packages/snyk-fix/src/partition-by-vulnerable.ts +++ b/packages/snyk-fix/src/partition-by-vulnerable.ts @@ -1,8 +1,9 @@ import { EntityToFix } from './types'; -export function partitionByVulnerable( - entities: EntityToFix[], -): { vulnerable: EntityToFix[]; notVulnerable: EntityToFix[] } { +export function partitionByVulnerable(entities: EntityToFix[]): { + vulnerable: EntityToFix[]; + notVulnerable: EntityToFix[]; +} { const vulnerable: EntityToFix[] = []; const notVulnerable: EntityToFix[] = []; diff --git a/packages/snyk-fix/src/plugins/package-tool-supported.ts b/packages/snyk-fix/src/plugins/package-tool-supported.ts index e7e26447e8..64ac669a5b 100644 --- a/packages/snyk-fix/src/plugins/package-tool-supported.ts +++ b/packages/snyk-fix/src/plugins/package-tool-supported.ts @@ -42,9 +42,8 @@ export async function checkPackageToolSupported( return; } - const { supported, versions } = supportFunc[ - packageManager - ].isSupportedVersion(version); + const { supported, versions } = + supportFunc[packageManager].isSupportedVersion(version); if (!supported) { const spinnerMessage = ` ${version} ${packageManager} version detected. 
Currently the following ${packageManager} versions are supported: ${versions.join( ',', diff --git a/packages/snyk-fix/src/plugins/python/handlers/is-supported.ts b/packages/snyk-fix/src/plugins/python/handlers/is-supported.ts index dfacc00fe8..765f1c0a8b 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/is-supported.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/is-supported.ts @@ -33,9 +33,7 @@ export async function isSupported( return { supported: true }; } -export async function partitionByFixable( - entities: EntityToFix[], -): Promise<{ +export async function partitionByFixable(entities: EntityToFix[]): Promise<{ skipped: Array>; fixable: EntityToFix[]; }> { diff --git a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/extract-version-provenance.ts b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/extract-version-provenance.ts index f562b3136b..7a5c820958 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/extract-version-provenance.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/extract-version-provenance.ts @@ -31,9 +31,8 @@ export async function extractProvenance( ...provenance, [relativeTargetFileName]: parseRequirementsFile(requirementsTxt), }; - const { containsRequire, matches } = await containsRequireDirective( - requirementsTxt, - ); + const { containsRequire, matches } = + await containsRequireDirective(requirementsTxt); if (containsRequire) { for (const match of matches) { const requiredFilePath = match[2]; diff --git a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/index.ts b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/index.ts index 9111cf9c5c..5107f9190f 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/index.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/index.ts @@ -253,9 +253,7 @@ function filterOutAppliedUpgrades( return pinRemediation; } -function sortByDirectory( 
- entities: EntityToFix[], -): { +function sortByDirectory(entities: EntityToFix[]): { [dir: string]: Array<{ entity: EntityToFix; dir: string; @@ -274,9 +272,7 @@ function sortByDirectory( return groupBy(sorted, 'dir'); } -export async function selectFileForPinning( - entity: EntityToFix, -): Promise<{ +export async function selectFileForPinning(entity: EntityToFix): Promise<{ fileName: string; fileContent: string; }> { @@ -287,9 +283,8 @@ export async function selectFileForPinning( let fileName = base; let requirementsTxt = await workspace.readFile(targetFile); - const { containsRequire, matches } = await containsRequireDirective( - requirementsTxt, - ); + const { containsRequire, matches } = + await containsRequireDirective(requirementsTxt); const constraintsMatch = matches.filter((m) => m.includes('c')); if (containsRequire && constraintsMatch[0]) { // prefer to pin in constraints file if present diff --git a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/generate-pins.ts b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/generate-pins.ts index 2c8474e776..df2eed6de0 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/generate-pins.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/generate-pins.ts @@ -31,9 +31,8 @@ export function generatePins( const pinnedRequirements = Object.keys(standardizedPins) .map((pkgNameAtVersion) => { const [pkgName, version] = pkgNameAtVersion.split('@'); - const newVersion = standardizedPins[pkgNameAtVersion].upgradeTo.split( - '@', - )[1]; + const newVersion = + standardizedPins[pkgNameAtVersion].upgradeTo.split('@')[1]; const newRequirement = `${standardizePackageName( pkgName, )}>=${newVersion}`; diff --git a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/index.ts 
b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/index.ts index b8930ac9c2..c29807a40c 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/index.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/index.ts @@ -25,10 +25,8 @@ export function updateDependencies( updatedManifest: string; changes: FixChangesSummary[]; } { - const { - requirements, - endsWithNewLine: shouldEndWithNewLine, - } = parsedRequirementsData; + const { requirements, endsWithNewLine: shouldEndWithNewLine } = + parsedRequirementsData; if (!requirements.length) { debug( 'Error: Expected to receive parsed manifest data. Is manifest empty?', diff --git a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/requirements-file-parser.ts b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/requirements-file-parser.ts index ec5dd6dbef..9b7bfeee7f 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/requirements-file-parser.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/pip-requirements/update-dependencies/requirements-file-parser.ts @@ -63,7 +63,8 @@ function extractDependencyDataFromLine( // Regex to match against a Python package specifier. Any invalid lines (or // lines we can't handle) should have been returned this point. 
- const regex = /([A-Z0-9-._]*)(!=|===|==|>=|<=|>|<|~=)(\d*\.?\d*\.?\d*[A-Z0-9]*)(.*)/i; + const regex = + /([A-Z0-9-._]*)(!=|===|==|>=|<=|>|<|~=)(\d*\.?\d*\.?\d*[A-Z0-9]*)(.*)/i; const result = regex.exec(requirementText); if (result !== null) { requirement.name = standardizePackageName(result[1]); diff --git a/packages/snyk-fix/src/plugins/python/handlers/pipenv-pipfile/update-dependencies/pipenv-add.ts b/packages/snyk-fix/src/plugins/python/handlers/pipenv-pipfile/update-dependencies/pipenv-add.ts index 9aabdd5392..c6560d5362 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/pipenv-pipfile/update-dependencies/pipenv-add.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/pipenv-pipfile/update-dependencies/pipenv-add.ts @@ -30,14 +30,10 @@ export async function pipenvAdd( const targetFilePath = pathLib.resolve(entity.workspace.path, targetFile); const { dir } = pathLib.parse(targetFilePath); if (!options.dryRun && upgrades.length) { - const { - stderr, - stdout, - command, - exitCode, - } = await pipenvPipfileFix.pipenvInstall(dir, upgrades, { - python: entity.options.command, - }); + const { stderr, stdout, command, exitCode } = + await pipenvPipfileFix.pipenvInstall(dir, upgrades, { + python: entity.options.command, + }); debug('`pipenv add` returned:', { stderr, stdout, command }); if (exitCode !== 0) { pipenvCommand = command; diff --git a/packages/snyk-fix/src/plugins/python/handlers/validate-required-data.ts b/packages/snyk-fix/src/plugins/python/handlers/validate-required-data.ts index 5da56e3769..808ab636bc 100644 --- a/packages/snyk-fix/src/plugins/python/handlers/validate-required-data.ts +++ b/packages/snyk-fix/src/plugins/python/handlers/validate-required-data.ts @@ -3,9 +3,7 @@ import { MissingFileNameError } from '../../../lib/errors/missing-file-name'; import { NoFixesCouldBeAppliedError } from '../../../lib/errors/no-fixes-applied'; import { EntityToFix, RemediationChanges, Workspace } from '../../../types'; -export function 
validateRequiredData( - entity: EntityToFix, -): { +export function validateRequiredData(entity: EntityToFix): { remediation: RemediationChanges; targetFile: string; workspace: Workspace; diff --git a/packages/snyk-fix/src/plugins/python/index.ts b/packages/snyk-fix/src/plugins/python/index.ts index 5a4a2695ae..901b30cdb7 100644 --- a/packages/snyk-fix/src/plugins/python/index.ts +++ b/packages/snyk-fix/src/plugins/python/index.ts @@ -30,9 +30,8 @@ export async function pythonFix( }, }; const results = handlerResult.python; - const { entitiesPerType, skipped: notSupported } = mapEntitiesPerHandlerType( - entities, - ); + const { entitiesPerType, skipped: notSupported } = + mapEntitiesPerHandlerType(entities); results.skipped.push(...notSupported); spinner.stopAndPersist({ @@ -57,9 +56,8 @@ export async function pythonFix( const handler = loadHandler(projectType as SUPPORTED_HANDLER_TYPES); // drop unsupported Python entities early so only potentially fixable items get // attempted to be fixed - const { fixable, skipped: notFixable } = await partitionByFixable( - projectsToFix, - ); + const { fixable, skipped: notFixable } = + await partitionByFixable(projectsToFix); results.skipped.push(...notFixable); const { failed, skipped, succeeded } = await handler(fixable, options); diff --git a/packages/snyk-fix/src/plugins/python/map-entities-per-handler-type.ts b/packages/snyk-fix/src/plugins/python/map-entities-per-handler-type.ts index 91ed5c2136..1809a71256 100644 --- a/packages/snyk-fix/src/plugins/python/map-entities-per-handler-type.ts +++ b/packages/snyk-fix/src/plugins/python/map-entities-per-handler-type.ts @@ -6,9 +6,7 @@ import { SUPPORTED_HANDLER_TYPES } from './supported-handler-types'; const debug = debugLib('snyk-fix:python'); -export function mapEntitiesPerHandlerType( - entities: EntityToFix[], -): { +export function mapEntitiesPerHandlerType(entities: EntityToFix[]): { skipped: Array>; entitiesPerType: { [projectType in SUPPORTED_HANDLER_TYPES]: 
EntityToFix[]; diff --git a/packages/snyk-fix/test/acceptance/plugins/python/handlers/pipenv-pipfile/update-dependencies.spec.ts b/packages/snyk-fix/test/acceptance/plugins/python/handlers/pipenv-pipfile/update-dependencies.spec.ts index 434eaae7a5..18fa23d23c 100644 --- a/packages/snyk-fix/test/acceptance/plugins/python/handlers/pipenv-pipfile/update-dependencies.spec.ts +++ b/packages/snyk-fix/test/acceptance/plugins/python/handlers/pipenv-pipfile/update-dependencies.spec.ts @@ -164,8 +164,7 @@ describe('fix Pipfile Python projects', () => { issueIds: ['vuln-id'], reason: 'Locking failed', success: false, - tip: - 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', + tip: 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', to: 'django@2.0.1', userMessage: 'Failed to upgrade django from 1.6.1 to 2.0.1', }, @@ -174,8 +173,7 @@ describe('fix Pipfile Python projects', () => { issueIds: [], reason: 'Locking failed', success: false, - tip: - 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', + tip: 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', to: 'transitive@1.1.1', userMessage: 'Failed to pin transitive from 1.0.0 to 1.1.1', }, @@ -263,8 +261,7 @@ describe('fix Pipfile Python projects', () => { success: false, reason: err, userMessage: 'Failed to upgrade django from 1.6.1 to 2.0.1', - tip: - 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', + tip: 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', issueIds: ['vuln-id'], from: 'django@1.6.1', to: 'django@2.0.1', @@ -273,8 +270,7 @@ describe('fix Pipfile Python projects', () => { success: false, reason: err, userMessage: 'Failed to pin transitive from 1.0.0 to 1.1.1', - tip: - 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', + tip: 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', issueIds: [], from: 'transitive@1.0.0', to: 'transitive@1.1.1', @@ -716,8 +712,7 @@ describe('fix Pipfile Python projects (fix 
sequentially)', () => { issueIds: ['vuln-id'], reason: err, success: false, - tip: - 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', + tip: 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', to: 'django@2.0.1', userMessage: 'Failed to upgrade django from 1.6.1 to 2.0.1', }, @@ -726,8 +721,7 @@ describe('fix Pipfile Python projects (fix sequentially)', () => { issueIds: [], reason: err, success: false, - tip: - 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', + tip: 'Try running `pipenv install django==2.0.1 transitive==1.1.1`', to: 'transitive@1.1.1', userMessage: 'Failed to pin transitive from 1.0.0 to 1.1.1', }, diff --git a/packages/snyk-fix/test/acceptance/plugins/python/handlers/poetry/update-dependencies.spec.ts b/packages/snyk-fix/test/acceptance/plugins/python/handlers/poetry/update-dependencies.spec.ts index 195f7a3433..5ce5b18bfe 100644 --- a/packages/snyk-fix/test/acceptance/plugins/python/handlers/poetry/update-dependencies.spec.ts +++ b/packages/snyk-fix/test/acceptance/plugins/python/handlers/poetry/update-dependencies.spec.ts @@ -170,8 +170,7 @@ describe('fix Poetry Python projects', () => { issueIds: ['VULN-six'], reason: err, success: false, - tip: - 'Try running `poetry install six==2.0.1 transitive==1.1.1`', + tip: 'Try running `poetry install six==2.0.1 transitive==1.1.1`', to: 'six@2.0.1', userMessage: 'Failed to upgrade six from 1.1.6 to 2.0.1', }, @@ -180,8 +179,7 @@ describe('fix Poetry Python projects', () => { issueIds: [], reason: err, success: false, - tip: - 'Try running `poetry install six==2.0.1 transitive==1.1.1`', + tip: 'Try running `poetry install six==2.0.1 transitive==1.1.1`', to: 'transitive@1.1.1', userMessage: 'Failed to pin transitive from 1.0.0 to 1.1.1', }, diff --git a/packages/snyk-fix/test/helpers/generate-entity-to-fix.ts b/packages/snyk-fix/test/helpers/generate-entity-to-fix.ts index fb5a7a9192..1ef8664e32 100644 --- 
a/packages/snyk-fix/test/helpers/generate-entity-to-fix.ts +++ b/packages/snyk-fix/test/helpers/generate-entity-to-fix.ts @@ -25,7 +25,7 @@ export function generateEntityToFix( : { issues: [], issuesData: {}, - depGraphData: ('' as unknown) as DepGraphData, + depGraphData: '' as unknown as DepGraphData, }; const workspace = generateWorkspace(contents, path); const cliTestOptions = { @@ -112,7 +112,7 @@ export function generateTestResult(): TestResult { title: 'Fake vuln', }, }, - depGraphData: ('' as unknown) as DepGraphData, + depGraphData: '' as unknown as DepGraphData, remediation: { unresolved: [], upgrade: {}, diff --git a/packages/snyk-fix/test/unit/lib/issues/fixable-issues.spec.ts b/packages/snyk-fix/test/unit/lib/issues/fixable-issues.spec.ts index 3e7de595c7..9743fa10e5 100644 --- a/packages/snyk-fix/test/unit/lib/issues/fixable-issues.spec.ts +++ b/packages/snyk-fix/test/unit/lib/issues/fixable-issues.spec.ts @@ -7,7 +7,7 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, remediation: { ignore: {}, patch: { @@ -39,7 +39,7 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, remediation: { ignore: {}, patch: {}, @@ -71,7 +71,7 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, remediation: { ignore: {}, patch: {}, @@ -103,7 +103,7 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, remediation: { ignore: {}, patch: { @@ -146,7 +146,7 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, 
remediation: { ignore: {}, patch: {}, @@ -169,7 +169,7 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, remediation: { ignore: {}, patch: {}, @@ -202,12 +202,12 @@ describe('hasFixableIssues', () => { { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, }, { issues: [], issuesData: {}, - depGraphData: ({} as unknown) as DepGraphData, + depGraphData: {} as unknown as DepGraphData, remediation: { ignore: {}, patch: {}, diff --git a/packages/snyk-protect/src/lib/index.ts b/packages/snyk-protect/src/lib/index.ts index c9ef95bc79..ad83ca463a 100644 --- a/packages/snyk-protect/src/lib/index.ts +++ b/packages/snyk-protect/src/lib/index.ts @@ -61,10 +61,8 @@ async function protect(projectFolderPath: string) { } }); - const packageAtVersionsToPatches: Map< - string, - VulnPatches[] - > = await getAllPatches(vulnIdAndPackageNames, packageNameToVersionsMap); + const packageAtVersionsToPatches: Map = + await getAllPatches(vulnIdAndPackageNames, packageNameToVersionsMap); if (packageAtVersionsToPatches.size === 0) { console.log('Nothing to patch.'); @@ -77,9 +75,8 @@ async function protect(projectFolderPath: string) { const patchedModules: PatchedModule[] = []; foundPhysicalPackages.forEach((fpp) => { const packageNameAtVersion = `${fpp.packageName}@${fpp.packageVersion}`; - const vuldIdAndPatches = packageAtVersionsToPatches.get( - packageNameAtVersion, - ); + const vuldIdAndPatches = + packageAtVersionsToPatches.get(packageNameAtVersion); vuldIdAndPatches?.forEach((vp) => { vp.patches.forEach((patchDiffs) => { patchDiffs.patchDiffs.forEach((diff) => { diff --git a/packages/snyk-protect/test/util/runCommand.ts b/packages/snyk-protect/test/util/runCommand.ts index c4238e3a5d..e0bd3e81d7 100644 --- a/packages/snyk-protect/test/util/runCommand.ts +++ 
b/packages/snyk-protect/test/util/runCommand.ts @@ -34,12 +34,8 @@ const runCommand = ( cli.on('close', (code) => { resolve({ code: code || 0, - stdout: Buffer.concat(stdout) - .toString('utf-8') - .trim(), - stderr: Buffer.concat(stderr) - .toString('utf-8') - .trim(), + stdout: Buffer.concat(stdout).toString('utf-8').trim(), + stderr: Buffer.concat(stderr).toString('utf-8').trim(), }); }); }); diff --git a/release-scripts/create-jira-release.sh b/release-scripts/create-jira-release.sh new file mode 100755 index 0000000000..69dc29d84f --- /dev/null +++ b/release-scripts/create-jira-release.sh @@ -0,0 +1,16 @@ +#!/usr/bin/env bash +set -euox pipefail + +curl --fail --request POST \ + --url 'https://snyksec.atlassian.net/rest/api/3/version' \ + --user "$JIRA_USER_EMAIL:$JIRA_TOKEN" \ + --header 'Accept: application/json' \ + --header 'Content-Type: application/json' \ + --data "{ + \"archived\": false, + \"description\": \"https://github.com/snyk/cli/releases/tag/v$(cat binary-releases/version)\", + \"name\": \"$(cat binary-releases/version)\", + \"projectId\": 11104, + \"releaseDate\": \"$(date +%Y-%m-%d)\", + \"released\": true +}" diff --git a/release-scripts/release.json b/release-scripts/release.json index 101741e724..b872c806d5 100644 --- a/release-scripts/release.json +++ b/release-scripts/release.json @@ -6,6 +6,11 @@ "sha256": "snyk-alpine-sha256", "sha256Url": "https://downloads.snyk.io/cli/v1.0.0-monorepo/snyk-alpine.sha256" }, + "snyk-alpine-arm64": { + "url": "https://downloads.snyk.io/cli/v1.0.0-monorepo/snyk-alpine-arm64", + "sha256": "snyk-alpine-arm64-sha256", + "sha256Url": "https://downloads.snyk.io/cli/v1.0.0-monorepo/snyk-alpine-arm64.sha256" + }, "snyk-linux": { "url": "https://downloads.snyk.io/cli/v1.0.0-monorepo/snyk-linux", "sha256": "snyk-linux-sha256", diff --git a/release-scripts/upload-artifacts.sh b/release-scripts/upload-artifacts.sh index d3286dc110..29b5406362 100755 --- a/release-scripts/upload-artifacts.sh +++ 
b/release-scripts/upload-artifacts.sh @@ -4,31 +4,33 @@ set -euo pipefail PROTOCOL_VERSION_FILE=$(basename "$(/bin/ls binary-releases/ls-protocol-version*)") declare -a StaticFiles=( - "binary-releases/$PROTOCOL_VERSION_FILE" "binary-releases/snyk-alpine" - "binary-releases/snyk-linux" - "binary-releases/snyk-linux-arm64" - "binary-releases/snyk-macos" - "binary-releases/snyk-macos-arm64" - "binary-releases/snyk-win.exe" "binary-releases/snyk-alpine.sha256" + "binary-releases/snyk-alpine-arm64" + "binary-releases/snyk-alpine-arm64.sha256" + "binary-releases/snyk-linux" "binary-releases/snyk-linux.sha256" + "binary-releases/snyk-linux-arm64" "binary-releases/snyk-linux-arm64.sha256" + "binary-releases/snyk-macos" "binary-releases/snyk-macos.sha256" + "binary-releases/snyk-macos-arm64" "binary-releases/snyk-macos-arm64.sha256" + "binary-releases/snyk-win.exe" "binary-releases/snyk-win.exe.sha256" "binary-releases/sha256sums.txt.asc" + "binary-releases/$PROTOCOL_VERSION_FILE" ) declare -a StaticFilesFIPS=( - "binary-releases/fips/$PROTOCOL_VERSION_FILE" "binary-releases/fips/snyk-linux" - "binary-releases/fips/snyk-linux-arm64" - "binary-releases/fips/snyk-win.exe" "binary-releases/fips/snyk-linux.sha256" + "binary-releases/fips/snyk-linux-arm64" "binary-releases/fips/snyk-linux-arm64.sha256" + "binary-releases/fips/snyk-win.exe" "binary-releases/fips/snyk-win.exe.sha256" "binary-releases/fips/sha256sums.txt.asc" + "binary-releases/fips/$PROTOCOL_VERSION_FILE" ) VERSION_TAG="v$(cat binary-releases/version)" diff --git a/src/cli/args.ts b/src/cli/args.ts index d95d88d7b7..19697c5063 100644 --- a/src/cli/args.ts +++ b/src/cli/args.ts @@ -200,9 +200,9 @@ export function args(rawArgv: string[]): Args { } // TODO: eventually all arguments should be transformed like this. 
- const argumentsToTransform: Array> = [ + const argumentsToTransform: Array< + Partial + > = [ 'package-manager', 'packages-folder', 'severity-threshold', diff --git a/src/cli/commands/about.ts b/src/cli/commands/about.ts index 59513be71f..6bc1e58d59 100644 --- a/src/cli/commands/about.ts +++ b/src/cli/commands/about.ts @@ -11,8 +11,9 @@ export default function about(): void { `${licenseNotice.name} \u00B7 ${licenseNotice.version} \u00B7 ${licenseNotice.license}`, ); console.log( - `Author(s): ${licenseNotice.author || - 'Not filled'} \u00B7 Package: ${licenseNotice.source || ''}`, + `Author(s): ${ + licenseNotice.author || 'Not filled' + } \u00B7 Package: ${licenseNotice.source || ''}`, ); console.log(`${licenseNotice.licenseText || ''}`); // WTFPL is not required the embed its license text console.log('\n+-+-+-+-+-+-+'); diff --git a/src/cli/commands/fix/convert-legacy-test-result-to-new.ts b/src/cli/commands/fix/convert-legacy-test-result-to-new.ts index b51b5b49ac..69bfdbf150 100644 --- a/src/cli/commands/fix/convert-legacy-test-result-to-new.ts +++ b/src/cli/commands/fix/convert-legacy-test-result-to-new.ts @@ -5,9 +5,7 @@ import { TestResult as LegacyTestResult, } from '../../../lib/snyk-test/legacy'; -function convertVulnerabilities( - vulns: AnnotatedIssue[], -): { +function convertVulnerabilities(vulns: AnnotatedIssue[]): { issuesData: IssuesData; issues: Issue[]; } { diff --git a/src/cli/commands/fix/index.ts b/src/cli/commands/fix/index.ts index 8be98702dc..f1656a7edf 100644 --- a/src/cli/commands/fix/index.ts +++ b/src/cli/commands/fix/index.ts @@ -52,14 +52,15 @@ export default async function fix(...args: MethodArgs): Promise { (res) => Object.keys(res.testResult.issues).length, ); const { dryRun, quiet, sequential: sequentialFix } = options; - const { fixSummary, meta, results: resultsByPlugin } = await snykFix.fix( - results, - { - dryRun, - quiet, - sequentialFix, - }, - ); + const { + fixSummary, + meta, + results: resultsByPlugin, + } = await 
snykFix.fix(results, { + dryRun, + quiet, + sequentialFix, + }); setSnykFixAnalytics( fixSummary, diff --git a/src/cli/commands/ignore.ts b/src/cli/commands/ignore.ts index a6f11d2ed2..8600e5ea12 100644 --- a/src/cli/commands/ignore.ts +++ b/src/cli/commands/ignore.ts @@ -4,7 +4,7 @@ import * as authorization from '../../lib/authorization'; import * as auth from './auth/is-authed'; import { apiTokenExists } from '../../lib/api-token'; import { isCI } from '../../lib/is-ci'; -import { IgnoreRules, MethodResult } from './types'; +import { MethodResult } from './types'; import * as Debug from 'debug'; const debug = Debug('snyk'); @@ -80,7 +80,7 @@ export function ignoreIssue(options): Promise { created: new Date(), }; - const ignoreRules: IgnoreRules = pol.ignore; + const ignoreRules = pol.ignore; const issueIgnorePaths = ignoreRules[options.id] ?? []; diff --git a/src/cli/commands/monitor/index.ts b/src/cli/commands/monitor/index.ts index 4a60e28729..a242792e94 100644 --- a/src/cli/commands/monitor/index.ts +++ b/src/cli/commands/monitor/index.ts @@ -50,6 +50,7 @@ import { getFormattedMonitorOutput } from '../../../lib/ecosystems/monitor'; import { processCommandArgs } from '../process-command-args'; import { hasFeatureFlag } from '../../../lib/feature-flags'; import { PNPM_FEATURE_FLAG } from '../../../lib/package-managers'; +import { normalizeTargetFile } from '../../../lib/normalize-target-file'; const SEPARATOR = '\n-------------------------------------------------------\n'; const debug = Debug('snyk'); @@ -294,12 +295,15 @@ export default async function monitor(...args0: MethodArgs): Promise { maybePrintDepTree(options, projectDeps.depTree); } - const tFile = projectDeps.targetFile || targetFile; - const targetFileRelativePath = - projectDeps.plugin.targetFile || - (tFile && pathUtil.join(pathUtil.resolve(path), tFile)) || - ''; + const tFile = normalizeTargetFile( + projectDeps, + projectDeps.plugin, + targetFile, + ); + const targetFileRelativePath = tFile + ? 
pathUtil.resolve(pathUtil.resolve(path), tFile) + : ''; const res: MonitorResult = await promiseOrCleanup( snykMonitor( path, @@ -353,8 +357,8 @@ export default async function monitor(...args0: MethodArgs): Promise { res.data && res.data.userMessage ? chalk.bold.red(res.data.userMessage) : res.data - ? res.data.message - : 'Unknown error occurred.'; + ? res.data.message + : 'Unknown error occurred.'; return ( chalk.bold.white('\nMonitoring ' + res.path + '...\n\n') + errorMessage diff --git a/src/cli/commands/policy.ts b/src/cli/commands/policy.ts index 31293f67ea..2d38c66021 100644 --- a/src/cli/commands/policy.ts +++ b/src/cli/commands/policy.ts @@ -8,9 +8,7 @@ import { export default async function displayPolicy(path?: string): Promise { try { - const loadedPolicy = (await policy.load(path || process.cwd())) as Promise< - string - >; + const loadedPolicy = await policy.load(path || process.cwd()); return await display(loadedPolicy); } catch (error) { let adaptedError: CustomError; diff --git a/src/cli/commands/process-command-args.ts b/src/cli/commands/process-command-args.ts index f3a7c815cd..5bbe3ba932 100644 --- a/src/cli/commands/process-command-args.ts +++ b/src/cli/commands/process-command-args.ts @@ -1,12 +1,13 @@ import { Options } from '../../lib/types'; -export function processCommandArgs( - ...args -): { paths: string[]; options: Options & CommandOptions } { - let options = ({} as any) as Options & CommandOptions; +export function processCommandArgs(...args): { + paths: string[]; + options: Options & CommandOptions; +} { + let options = {} as any as Options & CommandOptions; if (typeof args[args.length - 1] === 'object') { - options = (args.pop() as any) as Options & CommandOptions; + options = args.pop() as any as Options & CommandOptions; } args = args.filter(Boolean); diff --git a/src/cli/commands/test/iac/index.ts b/src/cli/commands/test/iac/index.ts index 74edfebaa6..31b16f5e06 100644 --- a/src/cli/commands/test/iac/index.ts +++ 
b/src/cli/commands/test/iac/index.ts @@ -16,7 +16,9 @@ import { buildOutput, buildSpinner, printHeader } from './output'; import { InvalidArgumentError } from './local-execution/assert-iac-options-flag'; import { IaCTestFlags } from './local-execution/types'; -export default async function(...args: MethodArgs): Promise { +export default async function ( + ...args: MethodArgs +): Promise { const { options: originalOptions, paths } = processCommandArgs(...args); const options = setDefaultTestOptions(originalOptions); @@ -48,22 +50,18 @@ export default async function(...args: MethodArgs): Promise { printHeader(options); - const { - iacOutputMeta, - iacScanFailures, - iacIgnoredIssuesCount, - results, - } = await scan( - iacOrgSettings, - options, - testSpinner, - paths, - orgPublicId, - buildOciRegistry, - projectRoot, - remoteRepoUrl, - targetName, - ); + const { iacOutputMeta, iacScanFailures, iacIgnoredIssuesCount, results } = + await scan( + iacOrgSettings, + options, + testSpinner, + paths, + orgPublicId, + buildOciRegistry, + projectRoot, + remoteRepoUrl, + targetName, + ); return buildOutput({ results, diff --git a/src/cli/commands/test/iac/local-execution/file-parser.ts b/src/cli/commands/test/iac/local-execution/file-parser.ts index c6ec787b2a..e4127744b7 100644 --- a/src/cli/commands/test/iac/local-execution/file-parser.ts +++ b/src/cli/commands/test/iac/local-execution/file-parser.ts @@ -46,10 +46,8 @@ export async function parseFiles( ); if (tfFileData.length > 0) { - const { - parsedFiles: parsedTfFiles, - failedFiles: failedTfFiles, - } = parseTerraformFiles(tfFileData); + const { parsedFiles: parsedTfFiles, failedFiles: failedTfFiles } = + parseTerraformFiles(tfFileData); parsedFiles = parsedFiles.concat(parsedTfFiles); failedFiles = failedFiles.concat(failedTfFiles); } diff --git a/src/cli/commands/test/iac/local-execution/file-scanner.ts b/src/cli/commands/test/iac/local-execution/file-scanner.ts index 9c1ba083f0..399d129631 100644 --- 
a/src/cli/commands/test/iac/local-execution/file-scanner.ts +++ b/src/cli/commands/test/iac/local-execution/file-scanner.ts @@ -14,9 +14,7 @@ import { getErrorStringCode } from './error-utils'; import { IacFileInDirectory } from '../../../../../lib/types'; import { SEVERITIES } from '../../../../../lib/snyk-test/common'; -export async function scanFiles( - parsedFiles: Array, -): Promise<{ +export async function scanFiles(parsedFiles: Array): Promise<{ scannedFiles: IacFileScanResult[]; failedScans: IacFileInDirectory[]; }> { @@ -27,9 +25,8 @@ export async function scanFiles( const policyEngine = await getPolicyEngine(parsedFile.engineType); const result = policyEngine.scanFile(parsedFile); if (parsedFile.engineType === EngineType.Custom) { - const { validatedResult, invalidIssues } = validateResultFromCustomRules( - result, - ); + const { validatedResult, invalidIssues } = + validateResultFromCustomRules(result); validatedResult.violatedPolicies.forEach((policy) => { // custom rules will have a remediation field that is a string, so we need to map it to the resolve field. 
if (typeof policy.remediation === 'string') { @@ -54,9 +51,7 @@ async function getPolicyEngine(engineType: EngineType): Promise { return policyEngineCache[engineType]!; } -export function validateResultFromCustomRules( - result: IacFileScanResult, -): { +export function validateResultFromCustomRules(result: IacFileScanResult): { validatedResult: IacFileScanResult; invalidIssues: IacFileInDirectory[]; } { @@ -124,10 +119,8 @@ let policyEngineCache: { [key in EngineType]: PolicyEngine | null } = { async function buildPolicyEngine( engineType: EngineType, ): Promise { - const [ - policyEngineCoreDataPath, - policyEngineMetaDataPath, - ] = getLocalCachePath(engineType); + const [policyEngineCoreDataPath, policyEngineMetaDataPath] = + getLocalCachePath(engineType); try { const wasmFile = fs.readFileSync(policyEngineCoreDataPath); diff --git a/src/cli/commands/test/iac/local-execution/file-utils.ts b/src/cli/commands/test/iac/local-execution/file-utils.ts index 72e1dfe400..99d2a3f1b2 100644 --- a/src/cli/commands/test/iac/local-execution/file-utils.ts +++ b/src/cli/commands/test/iac/local-execution/file-utils.ts @@ -11,10 +11,7 @@ import { readdirSync } from 'fs'; import { join } from 'path'; function hashData(s: string): string { - const hashedData = crypto - .createHash('sha1') - .update(s) - .digest('hex'); + const hashedData = crypto.createHash('sha1').update(s).digest('hex'); return hashedData; } diff --git a/src/cli/commands/test/iac/local-execution/index.ts b/src/cli/commands/test/iac/local-execution/index.ts index b9a4cdbd9b..97a4d081ab 100644 --- a/src/cli/commands/test/iac/local-execution/index.ts +++ b/src/cli/commands/test/iac/local-execution/index.ts @@ -134,7 +134,7 @@ export async function test( // TODO: add support for proper typing of old TestResult interface. 
return { - results: (filteredIssues as unknown) as TestResult[], + results: filteredIssues as unknown as TestResult[], failures, ignoreCount, }; diff --git a/src/cli/commands/test/iac/local-execution/local-cache.ts b/src/cli/commands/test/iac/local-execution/local-cache.ts index 62858c1935..3962bda4d3 100644 --- a/src/cli/commands/test/iac/local-execution/local-cache.ts +++ b/src/cli/commands/test/iac/local-execution/local-cache.ts @@ -14,9 +14,7 @@ import envPaths from 'env-paths'; const debug = Debug('iac-local-cache'); const cachePath = config.CACHE_PATH ?? envPaths('snyk').cache; -const uuid = Math.random() - .toString(36) - .substring(2); +const uuid = Math.random().toString(36).substring(2); export const LOCAL_POLICY_ENGINE_DIR = cachePath + '/iac-data/' + uuid; const KUBERNETES_POLICY_ENGINE_WASM_PATH = path.join( @@ -144,7 +142,7 @@ export async function initLocalCache({ // We extract the Snyk rules after the custom rules to ensure our files // always overwrite whatever might be there. 
try { - const BUNDLE_URL = 'https://static.snyk.io/cli/wasm/bundle.tar.gz'; + const BUNDLE_URL = 'https://downloads.snyk.io/cli/wasm/bundle.tar.gz'; const response = await streamRequest({ method: 'get', url: BUNDLE_URL, @@ -187,7 +185,7 @@ export class FailedToDownloadRulesError extends CustomError { this.code = IaCErrorCodes.FailedToDownloadRulesError; this.strCode = getErrorStringCode(this.code); this.userMessage = - 'We were unable to download the security rules, please ensure the network can access https://static.snyk.io'; + 'We were unable to download the security rules, please ensure the network can access https://downloads.snyk.io'; } } diff --git a/src/cli/commands/test/iac/local-execution/measurable-methods.ts b/src/cli/commands/test/iac/local-execution/measurable-methods.ts index 313f40f5b3..72dce0a248 100644 --- a/src/cli/commands/test/iac/local-execution/measurable-methods.ts +++ b/src/cli/commands/test/iac/local-execution/measurable-methods.ts @@ -18,12 +18,12 @@ type Awaited = T extends PromiseLike ? U : T; // the compiler to be happy, so we need to unwrap it with the messy // Awaiter> rather than just using ReturnType directly. 
export function asyncPerformanceAnalyticsDecorator< - T extends (...args: any[]) => Promise + T extends (...args: any[]) => Promise, >( measurableMethod: T, analyticsKey: PerformanceAnalyticsKey, ): (...args: Parameters) => Promise>> { - return async function(...args) { + return async function (...args) { const startTime = Date.now(); const returnValue = await measurableMethod(...args); const durationMs = Date.now() - startTime; @@ -33,12 +33,12 @@ export function asyncPerformanceAnalyticsDecorator< } export function performanceAnalyticsDecorator< - T extends (...args: any[]) => any + T extends (...args: any[]) => any, >( measurableMethod: T, analyticsKey: PerformanceAnalyticsKey, ): (...args: Parameters) => ReturnType { - return function(...args) { + return function (...args) { const startTime = Date.now(); const returnValue = measurableMethod(...args); const durationMs = Date.now() - startTime; diff --git a/src/cli/commands/test/iac/local-execution/parsers/terraform-plan-parser.ts b/src/cli/commands/test/iac/local-execution/parsers/terraform-plan-parser.ts index f739c82c33..e1e3c82e1d 100644 --- a/src/cli/commands/test/iac/local-execution/parsers/terraform-plan-parser.ts +++ b/src/cli/commands/test/iac/local-execution/parsers/terraform-plan-parser.ts @@ -118,9 +118,8 @@ function referencedResourcesResolver( resolvedResource[key] = resourceExpressions[key]; } } - scanInput[inputKey][type][ - getResourceName(index, name) - ] = resolvedResource; + scanInput[inputKey][type][getResourceName(index, name)] = + resolvedResource; } } diff --git a/src/cli/commands/test/iac/local-execution/process-results/cli-share-results.ts b/src/cli/commands/test/iac/local-execution/process-results/cli-share-results.ts index 847582bc7a..32bd31a820 100644 --- a/src/cli/commands/test/iac/local-execution/process-results/cli-share-results.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/cli-share-results.ts @@ -7,7 +7,7 @@ import { ShareResultsOutput, } from '../types'; 
import { convertIacResultToScanResult } from '../../../../../../lib/iac/envelope-formatters'; -import { Policy } from '../../../../../../lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import { Contributor, IacOutputMeta, diff --git a/src/cli/commands/test/iac/local-execution/process-results/index.ts b/src/cli/commands/test/iac/local-execution/process-results/index.ts index e2d026f05a..56d2029e53 100644 --- a/src/cli/commands/test/iac/local-execution/process-results/index.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/index.ts @@ -1,4 +1,4 @@ -import { Policy } from '../../../../../../lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import { IacOutputMeta, ProjectAttributes, diff --git a/src/cli/commands/test/iac/local-execution/process-results/policy.ts b/src/cli/commands/test/iac/local-execution/process-results/policy.ts index d55a1e779a..caf783b869 100644 --- a/src/cli/commands/test/iac/local-execution/process-results/policy.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/policy.ts @@ -1,5 +1,5 @@ import { FormattedResult, PolicyMetadata } from '../types'; -import { Policy } from '../../../../../../lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; export function filterIgnoredIssues( policy: Policy | undefined, diff --git a/src/cli/commands/test/iac/local-execution/process-results/process-results.ts b/src/cli/commands/test/iac/local-execution/process-results/process-results.ts index f0e5f6a976..ae1b3b26c8 100644 --- a/src/cli/commands/test/iac/local-execution/process-results/process-results.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/process-results.ts @@ -2,7 +2,7 @@ import { filterIgnoredIssues } from './policy'; import { formatAndShareResults } from './share-results'; import { formatScanResults } from '../measurable-methods'; import * as cloneDeep from 'lodash.clonedeep'; -import { Policy } from 
'../../../../../../lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import { IacOutputMeta, ProjectAttributes, diff --git a/src/cli/commands/test/iac/local-execution/process-results/results-formatter.ts b/src/cli/commands/test/iac/local-execution/process-results/results-formatter.ts index 17d9832891..70cc3709f0 100644 --- a/src/cli/commands/test/iac/local-execution/process-results/results-formatter.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/results-formatter.ts @@ -31,20 +31,23 @@ export function formatScanResults( gitRemoteUrl?: string, ): FormattedResult[] { try { - const groupedByFile = scanResults.reduce((memo, scanResult) => { - const res = formatScanResult(scanResult, meta, options, projectRoot); - - if (memo[scanResult.filePath]) { - memo[scanResult.filePath].result.cloudConfigResults.push( - ...res.result.cloudConfigResults, - ); - } else { - res.meta.gitRemoteUrl = gitRemoteUrl; - res.meta.projectId = projectPublicIds[res.targetFile]; - memo[scanResult.filePath] = res; - } - return memo; - }, {} as { [key: string]: FormattedResult }); + const groupedByFile = scanResults.reduce( + (memo, scanResult) => { + const res = formatScanResult(scanResult, meta, options, projectRoot); + + if (memo[scanResult.filePath]) { + memo[scanResult.filePath].result.cloudConfigResults.push( + ...res.result.cloudConfigResults, + ); + } else { + res.meta.gitRemoteUrl = gitRemoteUrl; + res.meta.projectId = projectPublicIds[res.targetFile]; + memo[scanResult.filePath] = res; + } + return memo; + }, + {} as { [key: string]: FormattedResult }, + ); return Object.values(groupedByFile); } catch (e) { throw new FailedToFormatResults(); diff --git a/src/cli/commands/test/iac/local-execution/process-results/share-results-formatter.ts b/src/cli/commands/test/iac/local-execution/process-results/share-results-formatter.ts index d9f264940b..13ac834b9d 100644 --- 
a/src/cli/commands/test/iac/local-execution/process-results/share-results-formatter.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/share-results-formatter.ts @@ -30,19 +30,22 @@ export function formatShareResults( } function groupByFilePath(scanResults: IacFileScanResult[]) { - const groupedByFilePath = scanResults.reduce((memo, scanResult) => { - scanResult.violatedPolicies.forEach((violatedPolicy) => { - violatedPolicy.docId = scanResult.docId; - }); - if (memo[scanResult.filePath]) { - memo[scanResult.filePath].violatedPolicies.push( - ...scanResult.violatedPolicies, - ); - } else { - memo[scanResult.filePath] = scanResult; - } - return memo; - }, {} as Record); + const groupedByFilePath = scanResults.reduce( + (memo, scanResult) => { + scanResult.violatedPolicies.forEach((violatedPolicy) => { + violatedPolicy.docId = scanResult.docId; + }); + if (memo[scanResult.filePath]) { + memo[scanResult.filePath].violatedPolicies.push( + ...scanResult.violatedPolicies, + ); + } else { + memo[scanResult.filePath] = scanResult; + } + return memo; + }, + {} as Record, + ); return Object.values(groupedByFilePath); } diff --git a/src/cli/commands/test/iac/local-execution/process-results/share-results.ts b/src/cli/commands/test/iac/local-execution/process-results/share-results.ts index 6b12f754c7..8e2ad9fb97 100644 --- a/src/cli/commands/test/iac/local-execution/process-results/share-results.ts +++ b/src/cli/commands/test/iac/local-execution/process-results/share-results.ts @@ -1,6 +1,6 @@ import { isFeatureFlagSupportedForOrg } from '../../../../../../lib/feature-flags'; import { shareResults } from './cli-share-results'; -import { Policy } from '../../../../../../lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import { IacOutputMeta, ProjectAttributes, diff --git a/src/cli/commands/test/iac/v2/index.ts b/src/cli/commands/test/iac/v2/index.ts index c237a3709c..ecc8480fe7 100644 --- a/src/cli/commands/test/iac/v2/index.ts +++ 
b/src/cli/commands/test/iac/v2/index.ts @@ -15,9 +15,10 @@ import { assertIacV2Options } from './assert-iac-options'; export async function test( paths: string[], options: IaCTestFlags, + iacNewEngine?: boolean, ): Promise { assertIacV2Options(options); - const testConfig = await prepareTestConfig(paths, options); + const testConfig = await prepareTestConfig(paths, options, iacNewEngine); const testSpinner = buildSpinner(options); @@ -41,6 +42,7 @@ export async function test( async function prepareTestConfig( paths: string[], options: IaCTestFlags, + iacNewEngine?: boolean, ): Promise { const iacCachePath = pathLib.join(systemCachePath, 'iac'); @@ -86,5 +88,6 @@ async function prepareTestConfig( org, customRules, experimental, + iacNewEngine, }; } diff --git a/src/cli/commands/test/index.ts b/src/cli/commands/test/index.ts index fb638f3c3f..39a28b9f32 100644 --- a/src/cli/commands/test/index.ts +++ b/src/cli/commands/test/index.ts @@ -64,10 +64,16 @@ export default async function test( const { options: originalOptions, paths } = processCommandArgs(...args); const options = setDefaultTestOptions(originalOptions); + if (originalOptions.iac) { + const iacNewEngine = await hasFeatureFlag('iacNewEngine', options); + const iacIntegratedExperience = await hasFeatureFlag( + 'iacIntegratedExperience', + options, + ); // temporary placeholder for the "new" flow that integrates with UPE - if (await hasFeatureFlag('iacIntegratedExperience', options)) { - return await iacTestCommandV2.test(paths, originalOptions); + if (iacIntegratedExperience || iacNewEngine) { + return await iacTestCommandV2.test(paths, originalOptions, iacNewEngine); } else { return await iacTestCommand(...args); } @@ -80,10 +86,8 @@ export default async function test( validateTestOptions(options); validateCredentials(options); - const packageJsonPathsWithSnykDepForProtect: string[] = getPackageJsonPathsContainingSnykDependency( - options.file, - paths, - ); + const packageJsonPathsWithSnykDepForProtect: 
string[] = + getPackageJsonPathsContainingSnykDependency(options.file, paths); analytics.add( 'upgradable-snyk-protect-paths', diff --git a/src/cli/commands/test/set-default-test-options.ts b/src/cli/commands/test/set-default-test-options.ts index 62504a31d0..e86b5c9cdf 100644 --- a/src/cli/commands/test/set-default-test-options.ts +++ b/src/cli/commands/test/set-default-test-options.ts @@ -9,12 +9,15 @@ export function setDefaultTestOptions( .toLowerCase(); delete options['show-vulnerable-paths']; + const showVulnPaths = showVulnPathsMapping[svpSupplied] || 'some'; + const maxVulnPaths = options['max-vulnerable-paths']; return { ...options, // org fallback to config unless specified org: options.org || config.org, // making `show-vulnerable-paths` 'some' by default. - showVulnPaths: showVulnPathsMapping[svpSupplied] || 'some', + showVulnPaths, + maxVulnPaths, }; } diff --git a/src/cli/commands/types.ts b/src/cli/commands/types.ts index 9d27b3257f..1a0a3e5452 100644 --- a/src/cli/commands/types.ts +++ b/src/cli/commands/types.ts @@ -134,7 +134,3 @@ export interface IgnoreMetadata { export interface IgnoreRulePathData { [path: string]: IgnoreMetadata; } - -export interface IgnoreRules { - [issueId: string]: IgnoreRulePathData[]; -} diff --git a/src/cli/commands/update-exclude-policy.ts b/src/cli/commands/update-exclude-policy.ts index 33c9073ab8..882fda302f 100644 --- a/src/cli/commands/update-exclude-policy.ts +++ b/src/cli/commands/update-exclude-policy.ts @@ -10,7 +10,6 @@ import { parseDriftAnalysisResults, updateExcludeInPolicy, } from '../../lib/iac/drift'; -import { Policy } from '../../lib/policy/find-and-load-policy'; import * as analytics from '../../lib/analytics'; export default async (...args: MethodArgs): Promise => { @@ -42,7 +41,7 @@ export default async (...args: MethodArgs): Promise => { // Add analytics analytics.add('is-iac-drift', true); - let policy: Policy; + let policy: snykPolicyLib.Policy; try { policy = await snykPolicyLib.load(); } catch 
(error) { diff --git a/src/cli/main.ts b/src/cli/main.ts index 86d6d56070..b03d6f1d2c 100755 --- a/src/cli/main.ts +++ b/src/cli/main.ts @@ -274,7 +274,7 @@ export async function main(): Promise { modeValidation(globalArgs); // TODO: fix this, we do transformation to options and teh type doesn't reflect it validateUnsupportedOptionCombinations( - (globalArgs.options as unknown) as AllSupportedCliOptions, + globalArgs.options as unknown as AllSupportedCliOptions, ); if (globalArgs.options['group-issues'] && globalArgs.options['iac']) { diff --git a/src/lib/common.ts b/src/lib/common.ts index 94b579feeb..f81dda1fe7 100644 --- a/src/lib/common.ts +++ b/src/lib/common.ts @@ -48,8 +48,7 @@ export function testPlatformSupport() { if (analytics.allowAnalytics()) { const sentryError = new Error('Unsupported Platform: ' + currentPlatform); Sentry.init({ - dsn: - 'https://3e845233db8c4f43b4c4b9245f1d7bd6@o30291.ingest.sentry.io/4504599528079360', + dsn: 'https://3e845233db8c4f43b4c4b9245f1d7bd6@o30291.ingest.sentry.io/4504599528079360', release: version.getVersion(), }); Sentry.captureException(sentryError); diff --git a/src/lib/config/index.ts b/src/lib/config/index.ts index 54de2e26f9..05a966fcc8 100644 --- a/src/lib/config/index.ts +++ b/src/lib/config/index.ts @@ -35,12 +35,11 @@ interface Config { IAC_POLICY_ENGINE_PATH?: string; IAC_RULES_CLIENT_URL?: string; PUBLIC_VULN_DB_URL: string; + PUBLIC_LICENSE_URL: string; } // TODO: fix the types! 
-const config = (snykConfig.loadConfig( - __dirname + '/../..', -) as unknown) as Config; +const config = snykConfig.loadConfig(__dirname + '/../..') as unknown as Config; const defaultApiUrl = 'https://api.snyk.io'; const configDefinedApiUrl = userConfig.get('endpoint'); @@ -89,6 +88,7 @@ if (!config.ROOT) { } config.PUBLIC_VULN_DB_URL = 'https://security.snyk.io'; +config.PUBLIC_LICENSE_URL = 'https://snyk.io'; config.CODE_CLIENT_PROXY_URL = process.env.SNYK_CODE_CLIENT_PROXY_URL || ''; diff --git a/src/lib/constants.ts b/src/lib/constants.ts index c88460e4d2..61b2bd178e 100644 --- a/src/lib/constants.ts +++ b/src/lib/constants.ts @@ -10,3 +10,6 @@ export const CALL_PATH_LEADING_ELEMENTS = 2; // Number of function names to show in the end of an abbreviated call path export const CALL_PATH_TRAILING_ELEMENTS = 2; + +// Number of subdirectories to search when running monitor or test +export const MAX_DETECTION_DEPTH = 4; diff --git a/src/lib/display-policy.ts b/src/lib/display-policy.ts index 9b086406e8..a7b0976bd1 100644 --- a/src/lib/display-policy.ts +++ b/src/lib/display-policy.ts @@ -1,9 +1,9 @@ import chalk from 'chalk'; -import { demunge } from 'snyk-policy'; +import { demunge, Policy, VulnRules } from 'snyk-policy'; import config from './config'; -export async function display(policy) { - const p = demunge(policy, config.PUBLIC_VULN_DB_URL); +export async function display(policy: Policy): Promise { + const p = demunge(policy, apiRoot); const delimiter = '\n\n------------------------\n'; let res = @@ -27,8 +27,8 @@ export async function display(policy) { return Promise.resolve(res); } - -function displayRule(title) { +// id url paths, path reason expires +function displayRule(title: string): (rule: VulnRules, i: number) => string { return (rule, i) => { i += 1; @@ -49,7 +49,7 @@ function displayRule(title) { ? 
'\nReason: ' + p.reason + '\nExpires: ' + - p.expires.toUTCString() + + p.expires?.toUTCString() + '\n' : '') + '\n' @@ -60,3 +60,11 @@ function displayRule(title) { ); }; } + +function apiRoot(vulnId: string) { + const match = new RegExp(/^snyk:lic/i).test(vulnId); + if (match) { + return config.PUBLIC_LICENSE_URL; + } + return config.PUBLIC_VULN_DB_URL; +} diff --git a/src/lib/ecosystems/monitor.ts b/src/lib/ecosystems/monitor.ts index 591ba68db3..1d95921bf3 100644 --- a/src/lib/ecosystems/monitor.ts +++ b/src/lib/ecosystems/monitor.ts @@ -133,10 +133,8 @@ async function monitorDependencies( for (const [path, scanResults] of Object.entries(scans)) { await spinner(`Monitoring dependencies in ${path}`); for (const scanResult of scanResults) { - const monitorDependenciesRequest = await generateMonitorDependenciesRequest( - scanResult, - options, - ); + const monitorDependenciesRequest = + await generateMonitorDependenciesRequest(scanResult, options); const configOrg = config.org ? decodeURIComponent(config.org) : undefined; @@ -154,9 +152,8 @@ async function monitorDependencies( }, }; try { - const response = await makeRequest( - payload, - ); + const response = + await makeRequest(payload); results.push({ ...response, path, @@ -239,8 +236,8 @@ export async function getFormattedMonitorOutput( res.data && res.data.userMessage ? chalk.bold.red(res.data.userMessage) : res.data - ? res.data.message - : 'Unknown error occurred.'; + ? 
res.data.message + : 'Unknown error occurred.'; return ( chalk.bold.white('\nMonitoring ' + res.path + '...\n\n') + errorMessage diff --git a/src/lib/ecosystems/policy.ts b/src/lib/ecosystems/policy.ts index 099fe2415f..f72d0f4a2c 100644 --- a/src/lib/ecosystems/policy.ts +++ b/src/lib/ecosystems/policy.ts @@ -4,7 +4,7 @@ import { SupportedPackageManagers } from '../package-managers'; import { findAndLoadPolicy } from '../policy'; import { Options, PolicyOptions } from '../types'; import { Issue, IssuesData, ScanResult } from './types'; -import { Policy } from '../policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; export async function findAndLoadPolicyForScanResult( scanResult: ScanResult, @@ -53,8 +53,8 @@ export function filterIgnoredIssues( if (!allResourcesRule) { return true; } - const expiredIgnoreRule = + allResourcesRule['*'].expires && new Date(allResourcesRule['*'].expires) < new Date(); if (!expiredIgnoreRule) { delete filteredIssuesData[issue.issueId]; diff --git a/src/lib/ecosystems/resolve-test-facts.ts b/src/lib/ecosystems/resolve-test-facts.ts index 85eb24ef67..fef1753f08 100644 --- a/src/lib/ecosystems/resolve-test-facts.ts +++ b/src/lib/ecosystems/resolve-test-facts.ts @@ -185,11 +185,8 @@ export async function resolveAndTestFactsUnmanagedDeps( extractAndApplyPluginAnalytics(scanResult.analytics, id); } - const { - start_time, - dep_graph_data, - component_details, - } = await pollDepGraphAttributes(id, orgId); + const { start_time, dep_graph_data, component_details } = + await pollDepGraphAttributes(id, orgId); const { issues, diff --git a/src/lib/ecosystems/unmanaged/utils.ts b/src/lib/ecosystems/unmanaged/utils.ts index 24b073ee4a..892f9ccd34 100644 --- a/src/lib/ecosystems/unmanaged/utils.ts +++ b/src/lib/ecosystems/unmanaged/utils.ts @@ -109,7 +109,8 @@ export async function getOrgDefaultContext(): Promise { } export function isUUID(str) { - const uuidRegex = 
/^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i; + const uuidRegex = + /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i; return uuidRegex.test(str); } diff --git a/src/lib/errors/legacy-errors.js b/src/lib/errors/legacy-errors.js index c7d8ab12ff..5c0b944ea3 100644 --- a/src/lib/errors/legacy-errors.js +++ b/src/lib/errors/legacy-errors.js @@ -71,7 +71,7 @@ module.exports = function error(command) { return Promise.reject(e); }; -module.exports.message = function(error) { +module.exports.message = function (error) { let message = error; // defaults to a string (which is super unlikely) if (error instanceof Error) { if (error.code === 'VULNS') { diff --git a/src/lib/find-files.ts b/src/lib/find-files.ts index 5da2328277..f841d34547 100644 --- a/src/lib/find-files.ts +++ b/src/lib/find-files.ts @@ -3,13 +3,14 @@ import * as pathLib from 'path'; import * as sortBy from 'lodash.sortby'; import * as groupBy from 'lodash.groupby'; -import * as assign from 'lodash.assign'; import { detectPackageManagerFromFile } from './detect'; import * as debugModule from 'debug'; import { PNPM_FEATURE_FLAG, SUPPORTED_MANIFEST_FILES, } from './package-managers'; +import * as merge from 'lodash.merge'; +import { MAX_DETECTION_DEPTH } from './constants'; const debug = debugModule('snyk:find-files'); @@ -55,28 +56,12 @@ interface FindFilesRes { const ignoreFolders = ['node_modules', '.build']; interface FindFilesConfig { - path: string; - ignore?: string[]; - filter?: string[]; - levelsDeep?: number; - featureFlags?: Set; -} - -type DefaultFindConfig = { path: string; ignore: string[]; filter: string[]; levelsDeep: number; featureFlags: Set; -}; - -const defaultFindConfig: DefaultFindConfig = { - path: '', - ignore: [], - filter: [], - levelsDeep: 4, - featureFlags: new Set(), -}; +} /** * Find all files in given search path. Returns paths to files found. 
@@ -86,8 +71,10 @@ const defaultFindConfig: DefaultFindConfig = { * @param filter (optional) file names to find. If not provided all files are returned. * @param levelsDeep (optional) how many levels deep to search, defaults to two, this path and one sub directory. */ -export async function find(findConfig: FindFilesConfig): Promise { - const config: DefaultFindConfig = assign({}, defaultFindConfig, findConfig); +export async function find( + findConfig: Partial, +): Promise { + const config = getFindConfig(findConfig); const found: string[] = []; const foundAll: string[] = []; @@ -152,10 +139,28 @@ function findFile(path: string, filter: string[] = []): string | null { return null; } +function getFindConfig(option: Partial): FindFilesConfig { + const result = merge( + { + path: '', + ignore: [], + filter: [], + levelsDeep: MAX_DETECTION_DEPTH, + featureFlags: new Set(), + }, + option, + ); + + if (isNaN(result.levelsDeep) || result.levelsDeep === null) { + result.levelsDeep = MAX_DETECTION_DEPTH; + } + return result; +} + async function findInDirectory( findConfig: FindFilesConfig, ): Promise { - const config: DefaultFindConfig = assign({}, defaultFindConfig, findConfig); + const config = getFindConfig(findConfig); const files = await readDirectory(config.path); const toFind = files .filter((file) => !config.ignore.includes(file)) diff --git a/src/lib/formatters/iac-output/text/failures/list.ts b/src/lib/formatters/iac-output/text/failures/list.ts index ad590f0ee4..adb8ad9dd5 100644 --- a/src/lib/formatters/iac-output/text/failures/list.ts +++ b/src/lib/formatters/iac-output/text/failures/list.ts @@ -77,9 +77,8 @@ export function formatIacTestWarnings(testWarnings: IaCTestWarning[]): string { } function formatWarningsList(testWarnings: IaCTestWarning[]): string { - const testWarningsByReasonAndPath = groupTestWarningsByReasonAndPath( - testWarnings, - ); + const testWarningsByReasonAndPath = + groupTestWarningsByReasonAndPath(testWarnings); return 
Object.values(testWarningsByReasonAndPath) .map((testWarning) => { @@ -96,9 +95,9 @@ type groupedIacTestWarnings = { expressions: string[]; }; -function groupTestWarningsByReasonAndPath( - testWarnings: IaCTestWarning[], -): { [key: string]: groupedIacTestWarnings } { +function groupTestWarningsByReasonAndPath(testWarnings: IaCTestWarning[]): { + [key: string]: groupedIacTestWarnings; +} { return testWarnings.reduce( (groupedWarnings: { [key: string]: groupedIacTestWarnings }, warning) => { const reasonAndPath = `${warning.warningReason}${warning.filePath}`; diff --git a/src/lib/formatters/iac-output/text/issues-list/index.ts b/src/lib/formatters/iac-output/text/issues-list/index.ts index 9acaea8891..3a8d7cb747 100644 --- a/src/lib/formatters/iac-output/text/issues-list/index.ts +++ b/src/lib/formatters/iac-output/text/issues-list/index.ts @@ -30,9 +30,8 @@ export function getIacDisplayedIssues( const severitySectionsOutput = Object.values(SEVERITY) .filter((severity) => !!resultsBySeverity[severity]) .map((severity) => { - const severityResults: FormattedOutputResult[] = resultsBySeverity[ - severity - ]!; + const severityResults: FormattedOutputResult[] = + resultsBySeverity[severity]!; const titleOutput = colors.title( `${capitalize(severity)} Severity Issues: ${severityResults.length}`, diff --git a/src/lib/formatters/iac-output/text/utils.ts b/src/lib/formatters/iac-output/text/utils.ts index 71a5be1f39..af50fb2533 100644 --- a/src/lib/formatters/iac-output/text/utils.ts +++ b/src/lib/formatters/iac-output/text/utils.ts @@ -39,11 +39,10 @@ export const maxLineWidth = process.stdout.columns export const countSuppressedIssues = ( suppressedIssues: Record, ): number => { - return Object.values(suppressedIssues).reduce(function( + return Object.values(suppressedIssues).reduce(function ( count, resourcesForRuleId, ) { return (count += resourcesForRuleId.length); - }, - 0); + }, 0); }; diff --git a/src/lib/formatters/open-source-sarif-output.ts 
b/src/lib/formatters/open-source-sarif-output.ts index 0388674197..52e68a00bf 100644 --- a/src/lib/formatters/open-source-sarif-output.ts +++ b/src/lib/formatters/open-source-sarif-output.ts @@ -106,6 +106,6 @@ function getIntroducedThrough(vuln: AnnotatedIssue) { return vuln.from.length > 2 ? `${firstFrom}, ${secondFrom} and others` : vuln.from.length === 2 - ? `${firstFrom} and ${secondFrom}` - : firstFrom; + ? `${firstFrom} and ${secondFrom}` + : firstFrom; } diff --git a/src/lib/formatters/test/format-test-results.ts b/src/lib/formatters/test/format-test-results.ts index dff3d27d18..5354c861dd 100644 --- a/src/lib/formatters/test/format-test-results.ts +++ b/src/lib/formatters/test/format-test-results.ts @@ -355,9 +355,7 @@ function getDockerRemediationDocsLink(dockerAdvice: string, config): string { ); } -export function groupVulnerabilities( - vulns, -): { +export function groupVulnerabilities(vulns): { [vulnId: string]: GroupedVuln; } { return vulns.reduce((map, curr) => { diff --git a/src/lib/iac/drift.ts b/src/lib/iac/drift.ts index fad0c3f883..18f4b772fa 100644 --- a/src/lib/iac/drift.ts +++ b/src/lib/iac/drift.ts @@ -5,7 +5,7 @@ import { DriftctlExecutionResult, GenDriftIgnoreOptions, } from './types'; -import { Policy } from '../policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import snykLogoSVG from './assets/snyk-logo'; import snykFaviconBase64 from './assets/snyk-favicon'; import { getHumanReadableAnalysis } from './drift/output'; @@ -20,7 +20,7 @@ export function driftignoreFromPolicy(policy: Policy | undefined): string[] { if (!policy || !policy.exclude || !(excludeSection in policy.exclude)) { return []; } - return policy.exclude[excludeSection]; + return policy.exclude[excludeSection] as string[]; } export const updateExcludeInPolicy = ( @@ -40,7 +40,11 @@ export const updateExcludeInPolicy = ( } if (!policy.exclude) { - policy.exclude = {}; + policy.exclude = { + global: [], + code: [], + 'iac-drift': [], + }; } 
policy.exclude['iac-drift'] = excludedResources; diff --git a/src/lib/iac/drift/driftctl.ts b/src/lib/iac/drift/driftctl.ts index 7462bf7b25..6c78aa48a9 100644 --- a/src/lib/iac/drift/driftctl.ts +++ b/src/lib/iac/drift/driftctl.ts @@ -60,7 +60,7 @@ const driftctlChecksums = { 'e6bbdf341148e81511d30dd5afe2fa2ef08f3b0b75079bf0bde2b790d75beb8a', }; -const dctlBaseUrl = 'https://static.snyk.io/cli/driftctl/'; +const dctlBaseUrl = 'https://downloads.snyk.io/cli/driftctl/'; const driftctlPath: string = path.join( cachePath, @@ -331,7 +331,7 @@ async function download(url, destination: string): Promise { await spinner('Downloading...'); return new Promise((resolve, reject) => { - makeRequest(payload, function(err, res, body) { + makeRequest(payload, function (err, res, body) { try { if (err) { reject( @@ -368,10 +368,7 @@ function validateChecksum(body: string) { return; } - const computedHash = crypto - .createHash('sha256') - .update(body) - .digest('hex'); + const computedHash = crypto.createHash('sha256').update(body).digest('hex'); const givenHash = driftctlChecksums[driftctlFileName()]; if (computedHash != givenHash) { diff --git a/src/lib/iac/envelope-formatters.ts b/src/lib/iac/envelope-formatters.ts index a1491b708c..e879bd8f48 100644 --- a/src/lib/iac/envelope-formatters.ts +++ b/src/lib/iac/envelope-formatters.ts @@ -4,7 +4,7 @@ import { PolicyMetadata, } from '../../cli/commands/test/iac/local-execution/types'; import { GitTarget, NamedTarget, ScanResult } from '../ecosystems/types'; -import { Policy } from '../policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import { IacOutputMeta } from '../types'; export function convertIacResultToScanResult( diff --git a/src/lib/iac/test/v2/analytics/iac-cloud-context.ts b/src/lib/iac/test/v2/analytics/iac-cloud-context.ts index 1477f92512..96c0aedb5c 100644 --- a/src/lib/iac/test/v2/analytics/iac-cloud-context.ts +++ b/src/lib/iac/test/v2/analytics/iac-cloud-context.ts @@ -23,9 +23,8 @@ export 
function getIacCloudContext( let iacCloudContextSuppressedIssuesCount = 0; const suppressedIssues = testOutput.results?.scanAnalytics?.suppressedResults; if (suppressedIssues) { - iacCloudContextSuppressedIssuesCount = countSuppressedIssues( - suppressedIssues, - ); + iacCloudContextSuppressedIssuesCount = + countSuppressedIssues(suppressedIssues); } return { diff --git a/src/lib/iac/test/v2/analytics/iac-type.ts b/src/lib/iac/test/v2/analytics/iac-type.ts index 0ebb5b4178..928d4e5013 100644 --- a/src/lib/iac/test/v2/analytics/iac-type.ts +++ b/src/lib/iac/test/v2/analytics/iac-type.ts @@ -2,15 +2,13 @@ import { SEVERITY } from '../../../../snyk-test/legacy'; import { ResourceKind, TestOutput } from '../scan/results'; export function getIacType(testOutput: TestOutput): IacType { - const resourcesCountByPackageManager = getResourcesCountByPackageManager( - testOutput, - ); + const resourcesCountByPackageManager = + getResourcesCountByPackageManager(testOutput); const filesCountByPackageManager = getFilesCountByPackageManager(testOutput); - const vulnAnalyticsByPackageManager = getVulnerabilityAnalyticsByPackageManager( - testOutput, - ); + const vulnAnalyticsByPackageManager = + getVulnerabilityAnalyticsByPackageManager(testOutput); return Object.keys(resourcesCountByPackageManager).reduce( (acc, packageManager) => { @@ -68,19 +66,22 @@ function getFilesCountByPackageManager( } return Object.entries( - testOutput.results.resources.reduce((acc, resource) => { - const packageManager = resource.kind; - - if (!acc[packageManager]) { - acc[packageManager] = new Set(); - } - - if (resource.file) { - acc[packageManager].add(resource.file); - } - - return acc; - }, {} as { [packageManager in PackageManager]: Set }), + testOutput.results.resources.reduce( + (acc, resource) => { + const packageManager = resource.kind; + + if (!acc[packageManager]) { + acc[packageManager] = new Set(); + } + + if (resource.file) { + acc[packageManager].add(resource.file); + } + + return acc; + 
}, + {} as { [packageManager in PackageManager]: Set }, + ), ).reduce((acc, [packageManager, filesSet]) => { acc[packageManager] = filesSet.size; diff --git a/src/lib/iac/test/v2/index.ts b/src/lib/iac/test/v2/index.ts index fdc6f1d5bd..7b915960b7 100644 --- a/src/lib/iac/test/v2/index.ts +++ b/src/lib/iac/test/v2/index.ts @@ -7,11 +7,8 @@ import { addIacAnalytics } from './analytics'; export { TestConfig } from './types'; export async function test(testConfig: TestConfig): Promise { - const { - policyEnginePath, - rulesBundlePath, - rulesClientURL, - } = await initLocalCache(testConfig); + const { policyEnginePath, rulesBundlePath, rulesClientURL } = + await initLocalCache(testConfig); const testOutput = await scan( testConfig, diff --git a/src/lib/iac/test/v2/json.ts b/src/lib/iac/test/v2/json.ts index 5724b1e3d3..effb37bec4 100644 --- a/src/lib/iac/test/v2/json.ts +++ b/src/lib/iac/test/v2/json.ts @@ -254,9 +254,8 @@ function vulnerabilitiesToResult( vulnerabilitiesToKind(passedVulnerabilities); const ignoreSettings = testOutput.settings.ignoreSettings; const meta = orgSettingsToMeta(testOutput, ignoreSettings); - const infrastructureAsCodeIssues = vulnerabilitiesToIacIssues( - vulnerabilities, - ); + const infrastructureAsCodeIssues = + vulnerabilitiesToIacIssues(vulnerabilities); const infrastructureAsCodeSuccesses = passedVulnerabilitiesToIacSuccesses( passedVulnerabilities, ); diff --git a/src/lib/iac/test/v2/local-cache/policy-engine/constants/utils.ts b/src/lib/iac/test/v2/local-cache/policy-engine/constants/utils.ts index c7571f9618..87134b85f8 100644 --- a/src/lib/iac/test/v2/local-cache/policy-engine/constants/utils.ts +++ b/src/lib/iac/test/v2/local-cache/policy-engine/constants/utils.ts @@ -1,11 +1,11 @@ import * as os from 'os'; const policyEngineChecksums = ` -102442f1a622c4006207b5fb3822ea493000fe711beeb1341359f14057872b67 snyk-iac-test_0.54.0_Darwin_arm64 -11cc1ed464380932cf46551a1a1eb8fbaea0cae2da0853b7dccbe58d872bc44e 
snyk-iac-test_0.54.0_Linux_arm64 -238cb88c2315d9bcca9a7f9a277934074f50902fdd595572cd739f4601b25ed1 snyk-iac-test_0.54.0_Linux_x86_64 -487291b0193f3ed1a6647c631dfaa401faa81509d6c7fa328e1f29296115668b snyk-iac-test_0.54.0_Darwin_x86_64 -c94f91823e135c9e585bb41e5c274116001f079a28865a62e19ff9c8688a88e0 snyk-iac-test_0.54.0_Windows_x86_64.exe +07b2676b6356acc480267beb708e730d826c4949d1fb6d683f069360a6bfd077 snyk-iac-test_0.56.1_Linux_arm64 +2f9c1546866cf393aced662061463a871f688f8abc228bb57b6bb415e3cf8c5a snyk-iac-test_0.56.1_Darwin_x86_64 +42695a4b2b25ef1ee84d62c4478158a07f748dbc2bcf90bb836b16853e5b614f snyk-iac-test_0.56.1_Windows_x86_64.exe +7e16bd9fe511b5d8ada9a577f5fd9dc5761f868f3dac11af452ae872436576bc snyk-iac-test_0.56.1_Linux_x86_64 +df947b33c41a8c63020d4d3e2d8f405c004cafa06b866d62c1e3b29977732271 snyk-iac-test_0.56.1_Darwin_arm64 `; export const policyEngineVersion = getPolicyEngineVersion(); diff --git a/src/lib/iac/test/v2/local-cache/policy-engine/download.ts b/src/lib/iac/test/v2/local-cache/policy-engine/download.ts index 95283c66ca..1b2092cd4b 100644 --- a/src/lib/iac/test/v2/local-cache/policy-engine/download.ts +++ b/src/lib/iac/test/v2/local-cache/policy-engine/download.ts @@ -58,7 +58,7 @@ async function fetch(): Promise { return policyEngineDataBuffer; } -export const policyEngineUrl = `https://static.snyk.io/cli/iac/test/v${policyEngineReleaseVersion}/${policyEngineFileName}`; +export const policyEngineUrl = `https://downloads.snyk.io/cli/iac/test/v${policyEngineReleaseVersion}/${policyEngineFileName}`; export class FailedToDownloadPolicyEngineError extends CustomError { constructor() { diff --git a/src/lib/iac/test/v2/output.ts b/src/lib/iac/test/v2/output.ts index 82d3bbe023..f937503a88 100644 --- a/src/lib/iac/test/v2/output.ts +++ b/src/lib/iac/test/v2/output.ts @@ -252,8 +252,8 @@ export class NoSuccessfulScansError extends FormattedCustomError { const message = options.json ? responseData.json : options.sarif - ? 
responseData.sarif - : firstErr.message; + ? responseData.sarif + : firstErr.message; super( message, isText diff --git a/src/lib/iac/test/v2/scan/index.ts b/src/lib/iac/test/v2/scan/index.ts index 1b5bdef7b1..32e43d6a2a 100644 --- a/src/lib/iac/test/v2/scan/index.ts +++ b/src/lib/iac/test/v2/scan/index.ts @@ -13,6 +13,7 @@ import config from '../../../../config'; import { api, getOAuthToken } from '../../../../api-token'; import envPaths from 'env-paths'; import { restoreEnvProxy } from '../../../env-utils'; +import * as analytics from '../../../../analytics'; const debug = newDebug('snyk-iac'); const debugOutput = newDebug('snyk-iac:output'); @@ -129,6 +130,10 @@ function processFlags( // required for infrastructureAsCodeSuccesses to be populated flags.push('-include-passed-vulnerabilities'); + if (analytics.allowAnalytics()) { + flags.push('-allow-analytics'); + } + if (options.severityThreshold) { flags.push('-severity-threshold', options.severityThreshold); } @@ -177,6 +182,10 @@ function processFlags( flags.push('-rulesClientURL', rulesClientURL); } + if (options.iacNewEngine) { + flags.push('-iac-new-engine'); + } + return flags; } diff --git a/src/lib/iac/test/v2/types.ts b/src/lib/iac/test/v2/types.ts index 7000816db1..e0a440381b 100644 --- a/src/lib/iac/test/v2/types.ts +++ b/src/lib/iac/test/v2/types.ts @@ -20,4 +20,5 @@ export interface TestConfig { org?: string; customRules?: boolean; experimental?: boolean; + iacNewEngine?: boolean; } diff --git a/src/lib/index.js b/src/lib/index.js index ed815a014e..e246398a51 100644 --- a/src/lib/index.js +++ b/src/lib/index.js @@ -16,16 +16,15 @@ const apiToken = require('./api-token'); Object.defineProperty(snyk, 'api', { enumerable: true, configurable: true, - get: function() { + get: function () { return apiToken.api(); }, - set: function(value) { + set: function (value) { snykConfig.api = value; }, }); snyk.test = require('./snyk-test'); -snyk.policy = require('snyk-policy'); // this is the user config, and not 
the internal config snyk.config = require('./user-config').config; diff --git a/src/lib/monitor/dev-count-analysis.ts b/src/lib/monitor/dev-count-analysis.ts index afe941a5ae..c13589950f 100644 --- a/src/lib/monitor/dev-count-analysis.ts +++ b/src/lib/monitor/dev-count-analysis.ts @@ -21,10 +21,8 @@ export async function getContributors( repoPath: process.cwd(), }, ): Promise { - const timestampStartOfContributingDeveloperPeriod = getTimestampStartOfContributingDevTimeframe( - endDate, - periodDays, - ); + const timestampStartOfContributingDeveloperPeriod = + getTimestampStartOfContributingDevTimeframe(endDate, periodDays); const gitLogResults = await runGitLog( timestampStartOfContributingDeveloperPeriod, Math.floor(endDate.getTime() / 1000), diff --git a/src/lib/monitor/index.ts b/src/lib/monitor/index.ts index c54085d6d9..61eb390beb 100644 --- a/src/lib/monitor/index.ts +++ b/src/lib/monitor/index.ts @@ -43,6 +43,7 @@ import { getTargetFile, } from './utils'; import { countPathsToGraphRoot } from '../utils'; +import { PackageExpanded } from 'snyk-resolve-deps/dist/types'; const debug = Debug('snyk'); @@ -187,7 +188,9 @@ async function monitorDepTree( root, meta.isDocker ? 
'docker' : packageManager!, options, - depTree, + // TODO: fix this and send only send when we used resolve-deps for node + // it should be a ExpandedPkgTree type instead + depTree as unknown as PackageExpanded, targetFileDir, ); @@ -427,7 +430,7 @@ async function monitorDepGraphFromDepTree( options, // TODO: fix this and send only send when we used resolve-deps for node // it should be a ExpandedPkgTree type instead - depTree, + depTree as unknown as PackageExpanded, targetFileDir, ); @@ -437,10 +440,8 @@ async function monitorDepGraphFromDepTree( treeMissingDeps = missingDeps; } - const depGraph: depGraphLib.DepGraph = await depGraphLib.legacy.depTreeToGraph( - depTree, - packageManager, - ); + const depGraph: depGraphLib.DepGraph = + await depGraphLib.legacy.depTreeToGraph(depTree, packageManager); const target = await projectMetadata.getInfo(scannedProject, meta, depTree); if (isGitTarget(target) && target.branch) { diff --git a/src/lib/normalize-target-file.ts b/src/lib/normalize-target-file.ts new file mode 100644 index 0000000000..2a4b65abae --- /dev/null +++ b/src/lib/normalize-target-file.ts @@ -0,0 +1,21 @@ +import type { PluginMetadata } from '@snyk/cli-interface/legacy/plugin'; +import type { ScannedProjectCustom } from './plugins/get-multi-plugin-result'; +import type { ScannedProject } from '@snyk/cli-interface/legacy/common'; + +/** + * Normalizes the target file path for a scanned project across + * test and monitor workflows. + * + * @param {ScannedProject | ScannedProjectCustom} scannedProject - The scanned project containing metadata such as the target file path. + * @param {PluginMetadata} plugin - Metadata about the plugin used to scan the project, which may also include the target file path. + * @param {string} [fallback=''] - A fallback value to return if neither the scanned project nor the plugin contain a target file path. Defaults to an empty string. 
+ * + * @returns {string} - The resolved target file path from either the scanned project, plugin, or the provided fallback value if none are available. + */ +export function normalizeTargetFile( + scannedProject: ScannedProject | ScannedProjectCustom, + plugin: PluginMetadata, + fallback = '', +): string { + return scannedProject.targetFile || plugin.targetFile || fallback; +} diff --git a/src/lib/package-managers.ts b/src/lib/package-managers.ts index a42080a7c2..514eb473b7 100644 --- a/src/lib/package-managers.ts +++ b/src/lib/package-managers.ts @@ -85,6 +85,7 @@ export const GRAPH_SUPPORTED_PACKAGE_MANAGERS: SupportedPackageManagers[] = [ 'yarn', 'rubygems', 'poetry', + 'cocoapods', ]; // For ecosystems with a flat set of libraries (e.g. Python, JVM), one can // "pin" a transitive dependency diff --git a/src/lib/plugins/get-deps-from-plugin.ts b/src/lib/plugins/get-deps-from-plugin.ts index 13c4039a27..718e7fd430 100644 --- a/src/lib/plugins/get-deps-from-plugin.ts +++ b/src/lib/plugins/get-deps-from-plugin.ts @@ -21,6 +21,7 @@ import { convertSingleResultToMultiCustom } from './convert-single-splugin-res-t import { convertMultiResultToMultiCustom } from './convert-multi-plugin-res-to-multi-custom'; import { processYarnWorkspaces } from './nodejs-plugin/yarn-workspaces-parser'; import { ScannedProject } from '@snyk/cli-interface/legacy/common'; +import { MAX_DETECTION_DEPTH } from '../constants'; const debug = debugModule('snyk-test'); @@ -43,7 +44,7 @@ export async function getDepsFromPlugin( ): Promise { if (Object.keys(multiProjectProcessors).some((key) => options[key])) { const scanType = options.yarnWorkspaces ? 'yarnWorkspaces' : 'allProjects'; - const levelsDeep = options.detectionDepth; + const levelsDeep = options.detectionDepth || MAX_DETECTION_DEPTH; const ignore = options.exclude ? 
options.exclude.split(',') : []; const { files: targetFiles, allFilesFound } = await find({ diff --git a/src/lib/plugins/get-extra-project-count.ts b/src/lib/plugins/get-extra-project-count.ts index a923d9f83c..c6d3ffca28 100644 --- a/src/lib/plugins/get-extra-project-count.ts +++ b/src/lib/plugins/get-extra-project-count.ts @@ -2,6 +2,7 @@ import { legacyPlugin as pluginApi } from '@snyk/cli-interface'; import { find } from '../find-files'; import { AUTO_DETECTABLE_FILES } from '../detect'; import { Options } from '../types'; +import { MAX_DETECTION_DEPTH } from '../constants'; export async function getExtraProjectCount( root: string, @@ -23,6 +24,8 @@ export async function getExtraProjectCount( path: root, ignore: [], filter: AUTO_DETECTABLE_FILES, + levelsDeep: MAX_DETECTION_DEPTH, + featureFlags: new Set(), }); const foundProjectsCount = extraTargetFiles.length > 1 ? extraTargetFiles.length - 1 : undefined; diff --git a/src/lib/plugins/get-multi-plugin-result.ts b/src/lib/plugins/get-multi-plugin-result.ts index f5943c231f..aac5fae1ef 100644 --- a/src/lib/plugins/get-multi-plugin-result.ts +++ b/src/lib/plugins/get-multi-plugin-result.ts @@ -75,10 +75,8 @@ export async function getMultiPluginResult( let unprocessedFilesfromWorkspaces = targetFiles; if (featureFlags.has(PNPM_FEATURE_FLAG)) { - const { - scannedProjects: scannedPnpmResults, - unprocessedFiles, - } = await processWorkspacesProjects(root, options, targetFiles, 'pnpm'); + const { scannedProjects: scannedPnpmResults, unprocessedFiles } = + await processWorkspacesProjects(root, options, targetFiles, 'pnpm'); unprocessedFilesfromWorkspaces = unprocessedFiles; allResults.push(...scannedPnpmResults); } @@ -94,15 +92,13 @@ export async function getMultiPluginResult( ); allResults.push(...scannedYarnResults); - const { - scannedProjects: scannedNpmResults, - unprocessedFiles, - } = await processWorkspacesProjects( - root, - options, - unprocessedFilesFromYarn, - 'npm', - ); + const { scannedProjects: 
scannedNpmResults, unprocessedFiles } = + await processWorkspacesProjects( + root, + options, + unprocessedFilesFromYarn, + 'npm', + ); allResults.push(...scannedNpmResults); debug(`Not part of a workspace: ${unprocessedFiles.join(', ')}}`); @@ -132,17 +128,19 @@ export async function getMultiPluginResult( resultWithScannedProjects = inspectRes; } - const pluginResultWithCustomScannedProjects = convertMultiResultToMultiCustom( - resultWithScannedProjects, - optionsClone.packageManager, - optionsClone.file, - ); + const pluginResultWithCustomScannedProjects = + convertMultiResultToMultiCustom( + resultWithScannedProjects, + optionsClone.packageManager, + optionsClone.file, + ); // annotate the package manager, project name & targetFile to be used // for test & monitor // TODO: refactor how we display meta to not have to do this - (options as any).projectNames = resultWithScannedProjects.scannedProjects.map( - (scannedProject) => scannedProject?.depTree?.name, - ); + (options as any).projectNames = + resultWithScannedProjects.scannedProjects.map( + (scannedProject) => scannedProject?.depTree?.name, + ); allResults.push(...pluginResultWithCustomScannedProjects.scannedProjects); } catch (error) { diff --git a/src/lib/plugins/nodejs-plugin/npm-lock-parser.ts b/src/lib/plugins/nodejs-plugin/npm-lock-parser.ts index ff92b87b33..f4af62dbda 100644 --- a/src/lib/plugins/nodejs-plugin/npm-lock-parser.ts +++ b/src/lib/plugins/nodejs-plugin/npm-lock-parser.ts @@ -55,9 +55,8 @@ export async function parse( debug(resolveModuleSpinnerLabel); const strictOutOfSync = options.strictOutOfSync !== false; - const lockfileVersion = lockFileParser.getLockfileVersionFromFile( - lockFileFullPath, - ); + const lockfileVersion = + lockFileParser.getLockfileVersionFromFile(lockFileFullPath); if ( lockfileVersion === NodeLockfileVersion.YarnLockV1 || lockfileVersion === NodeLockfileVersion.YarnLockV2 || diff --git a/src/lib/plugins/nodejs-plugin/npm-modules-parser.ts 
b/src/lib/plugins/nodejs-plugin/npm-modules-parser.ts index 7671afa93b..322fd64f19 100644 --- a/src/lib/plugins/nodejs-plugin/npm-modules-parser.ts +++ b/src/lib/plugins/nodejs-plugin/npm-modules-parser.ts @@ -1,6 +1,7 @@ import * as path from 'path'; import * as fs from 'fs'; -import * as resolveNodeDeps from 'snyk-resolve-deps'; +import { PackageExpanded } from 'snyk-resolve-deps/dist/types'; +import * as resolveDeps from 'snyk-resolve-deps'; import * as baseDebug from 'debug'; const isEmpty = require('lodash.isempty'); import { spinner } from '../../spinner'; @@ -14,7 +15,7 @@ export async function parse( root: string, targetFile: string, options: Options, -): Promise { +): Promise { if (targetFile.endsWith('yarn.lock')) { options.file = options.file && options.file.replace('yarn.lock', 'package.json'); @@ -44,7 +45,7 @@ export async function parse( name: packageJson.name || 'package.json', dependencies: {}, version: packageJson.version, - }), + } as unknown as PackageExpanded), ); } } catch (e) { @@ -77,7 +78,7 @@ export async function parse( try { await spinner.clear(resolveModuleSpinnerLabel)(); await spinner(resolveModuleSpinnerLabel); - return resolveNodeDeps( + return resolveDeps( root, Object.assign({}, options, { noFromArrays: true }), ); diff --git a/src/lib/plugins/nodejs-plugin/npm-workspaces-parser.ts b/src/lib/plugins/nodejs-plugin/npm-workspaces-parser.ts index 0131f54d31..0eacd63abf 100644 --- a/src/lib/plugins/nodejs-plugin/npm-workspaces-parser.ts +++ b/src/lib/plugins/nodejs-plugin/npm-workspaces-parser.ts @@ -166,9 +166,9 @@ export function packageJsonBelongsToWorkspace( const workspaceRootFolder = pathUtil.dirname( workspaceRoot.replace(/\\/g, '/'), ); - const workspacesGlobs = ( - workspacesMap[workspaceRoot].workspaces || [] - ).map((workspace) => pathUtil.join(workspaceRootFolder, workspace)); + const workspacesGlobs = (workspacesMap[workspaceRoot].workspaces || []).map( + (workspace) => pathUtil.join(workspaceRootFolder, workspace), + ); 
const match = micromatch.isMatch( packageJsonFileName.replace(/\\/g, '/'), diff --git a/src/lib/plugins/sast/analysis.ts b/src/lib/plugins/sast/analysis.ts index d6b4472693..a35f3f88c5 100644 --- a/src/lib/plugins/sast/analysis.ts +++ b/src/lib/plugins/sast/analysis.ts @@ -252,9 +252,9 @@ function parseSecurityResults(codeAnalysis: Log): Log { return codeAnalysis; } -function getSecurityRulesMap( - rules: ReportingDescriptor[], -): { [ruleId: string]: ReportingDescriptor[] } { +function getSecurityRulesMap(rules: ReportingDescriptor[]): { + [ruleId: string]: ReportingDescriptor[]; +} { const securityRulesMap = rules.reduce((acc, rule) => { const { id: ruleId, properties } = rule; const isSecurityRule = properties?.categories?.some( diff --git a/src/lib/plugins/sast/format/output-format.ts b/src/lib/plugins/sast/format/output-format.ts index 9eb2ad7c51..0bef86d3dd 100644 --- a/src/lib/plugins/sast/format/output-format.ts +++ b/src/lib/plugins/sast/format/output-format.ts @@ -136,9 +136,9 @@ function getIssues( return issues; } -function getRulesMap( - rules: Sarif.ReportingDescriptor[], -): { [ruleId: string]: Sarif.ReportingDescriptor } { +function getRulesMap(rules: Sarif.ReportingDescriptor[]): { + [ruleId: string]: Sarif.ReportingDescriptor; +} { const rulesMapByID = rules.reduce((acc, rule) => { acc[rule.id] = rule; return acc; diff --git a/src/lib/plugins/sast/index.ts b/src/lib/plugins/sast/index.ts index 5207e2ddbd..ad0151cdda 100644 --- a/src/lib/plugins/sast/index.ts +++ b/src/lib/plugins/sast/index.ts @@ -124,8 +124,9 @@ export const codePlugin: EcosystemPlugin = { } debug( chalk.bold.red( - `requestId: ${requestId} statusCode:${error.code || - error.statusCode}, message: ${error.statusText || error.message}`, + `requestId: ${requestId} statusCode:${ + error.code || error.statusCode + }, message: ${error.statusText || error.message}`, ), ); throw err; diff --git a/src/lib/policy/find-and-load-policy.ts b/src/lib/policy/find-and-load-policy.ts index 
2f0589b92e..d811772298 100644 --- a/src/lib/policy/find-and-load-policy.ts +++ b/src/lib/policy/find-and-load-policy.ts @@ -1,10 +1,10 @@ import * as snykPolicyLib from 'snyk-policy'; import * as debugModule from 'debug'; -import { PackageExpanded } from 'snyk-resolve-deps'; +import { PackageExpanded } from 'snyk-resolve-deps/dist/types'; import { pluckPolicies } from '.'; import { SupportedPackageManagers } from '../package-managers'; -import { PackageJson, PolicyOptions } from '../types'; +import { PolicyOptions } from '../types'; import * as analytics from '../analytics'; const debug = debugModule('snyk'); @@ -15,7 +15,7 @@ export async function findAndLoadPolicy( options: PolicyOptions, pkg?: PackageExpanded, scannedProjectFolder?: string, -): Promise { +): Promise { const isDocker = scanType === 'docker'; const isNodeProject = ['npm', 'yarn', 'pnpm'].includes(scanType); // monitor @@ -27,7 +27,10 @@ export async function findAndLoadPolicy( } else if (isNodeProject) { // TODO: pluckPolicies expects a package.json object to // find and apply policies in node_modules - policyLocations = policyLocations.concat(pluckPolicies(pkg as PackageJson)); + // TODO: fix these types, this is a hack and is not correct + policyLocations = policyLocations.concat( + pluckPolicies(pkg as unknown as PackageExpanded), + ); } debug('Potential policy locations found:', policyLocations); @@ -49,9 +52,3 @@ export async function findAndLoadPolicy( } return policy; } - -export interface Policy { - filter(vulns: any, root?: string, matchStrategy?: string): any; - exclude?: { [key: string]: string[] }; - ignore?: any; -} diff --git a/src/lib/policy/pluck-policies.ts b/src/lib/policy/pluck-policies.ts index 8a6d83ef9f..93eabafc5d 100644 --- a/src/lib/policy/pluck-policies.ts +++ b/src/lib/policy/pluck-policies.ts @@ -1,12 +1,16 @@ const flatten = require('lodash.flatten'); -import { PackageExpanded } from 'snyk-resolve-deps'; +import { PackageExpanded } from 'snyk-resolve-deps/dist/types'; 
export function pluckPolicies(pkg: PackageExpanded): string[] | string { if (!pkg) { return []; } + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore: broken type if (pkg.snyk) { + // eslint-disable-next-line @typescript-eslint/ban-ts-comment + // @ts-ignore: broken type return pkg.snyk; } diff --git a/src/lib/protect-update-notification.ts b/src/lib/protect-update-notification.ts index 07c19eb9bf..4ca1954179 100644 --- a/src/lib/protect-update-notification.ts +++ b/src/lib/protect-update-notification.ts @@ -87,9 +87,8 @@ export function getPackageJsonPathsContainingSnykDependency( directoryWithPackageJson, 'package.json', ); - const packageJsonContainsSnykDep = checkPackageJsonForSnykDependency( - packageJsonPath, - ); + const packageJsonContainsSnykDep = + checkPackageJsonForSnykDependency(packageJsonPath); if (packageJsonContainsSnykDep) { packageJsonPathsWithSnykDepForProtect.push(packageJsonPath); } @@ -99,9 +98,8 @@ export function getPackageJsonPathsContainingSnykDependency( paths.forEach((testPath) => { if (packageJsonFileExistsInDirectory(testPath)) { const packageJsonPath = path.resolve(testPath, 'package.json'); - const packageJsonContainsSnykDep = checkPackageJsonForSnykDependency( - packageJsonPath, - ); + const packageJsonContainsSnykDep = + checkPackageJsonForSnykDependency(packageJsonPath); if (packageJsonContainsSnykDep) { packageJsonPathsWithSnykDepForProtect.push(packageJsonPath); } diff --git a/src/lib/request/snyk-http-client.ts b/src/lib/request/snyk-http-client.ts index c0b7501915..73e3994607 100644 --- a/src/lib/request/snyk-http-client.ts +++ b/src/lib/request/snyk-http-client.ts @@ -17,9 +17,7 @@ interface RequestInfo { family?: number; } -export async function snykHttpClient( - requestInfo: RequestInfo, -): Promise<{ +export async function snykHttpClient(requestInfo: RequestInfo): Promise<{ res: needle.NeedleResponse; body: any; }> { diff --git a/src/lib/snyk-test/assemble-payloads.ts 
b/src/lib/snyk-test/assemble-payloads.ts index 5ad8bc6267..76527108af 100644 --- a/src/lib/snyk-test/assemble-payloads.ts +++ b/src/lib/snyk-test/assemble-payloads.ts @@ -56,8 +56,9 @@ export async function assembleEcosystemPayloads( payloads.push({ method: 'POST', - url: `${config.API}${options.testDepGraphDockerEndpoint || - '/test-dependencies'}`, + url: `${config.API}${ + options.testDepGraphDockerEndpoint || '/test-dependencies' + }`, json: true, headers: { 'x-is-ci': isCI(), diff --git a/src/lib/snyk-test/legacy.ts b/src/lib/snyk-test/legacy.ts index df0bd06239..e82b587f44 100644 --- a/src/lib/snyk-test/legacy.ts +++ b/src/lib/snyk-test/legacy.ts @@ -367,7 +367,10 @@ function convertTestDepGraphResultToLegacy( const vulns: AnnotatedIssue[] = []; for (const pkgInfo of values(result.affectedPkgs)) { - for (const vulnPkgPath of depGraph.pkgPathsToRoot(pkgInfo.pkg)) { + const pkgPathsToRoot = depGraph.pkgPathsToRoot(pkgInfo.pkg, { + limit: options.maxVulnPaths, + }); + for (const vulnPkgPath of pkgPathsToRoot) { const legacyFromPath = pkgPathToLegacyPath(vulnPkgPath.reverse()); for (const pkgIssue of values(pkgInfo.issues)) { const vulnPathString = getVulnPathString( @@ -407,7 +410,7 @@ function convertTestDepGraphResultToLegacy( const pkgAndVersion = (pkgInfo.pkg.name + '@' + pkgInfo.pkg.version) as string; - const annotatedIssue = (Object.assign( + const annotatedIssue = Object.assign( {}, binariesVulns.issuesData[pkgIssue.issueId], { @@ -419,7 +422,7 @@ function convertTestDepGraphResultToLegacy( version: pkgInfo.pkg.version as string, nearestFixedInVersion: pkgIssue.fixInfo.nearestFixedInVersion, }, - ) as any) as AnnotatedIssue; // TODO(kyegupov): get rid of forced type assertion + ) as any as AnnotatedIssue; // TODO(kyegupov): get rid of forced type assertion vulns.push(annotatedIssue); } } diff --git a/src/lib/snyk-test/run-test.ts b/src/lib/snyk-test/run-test.ts index c38c98ca7b..4db89f492b 100644 --- a/src/lib/snyk-test/run-test.ts +++ 
b/src/lib/snyk-test/run-test.ts @@ -33,7 +33,7 @@ import { NotFoundError, ServiceUnavailableError, } from '../errors'; -import * as snyk from '../'; +import * as snykPolicy from 'snyk-policy'; import { isCI } from '../is-ci'; import { RETRY_ATTEMPTS, @@ -84,6 +84,8 @@ import { PNPM_FEATURE_FLAG, SUPPORTED_MANIFEST_FILES, } from '../package-managers'; +import { PackageExpanded } from 'snyk-resolve-deps/dist/types'; +import { normalizeTargetFile } from '../normalize-target-file'; const debug = debugModule('snyk:run-test'); @@ -429,7 +431,7 @@ async function parseRes( // refactor to separate if (depGraph && pkgManager) { res = convertTestDepGraphResultToLegacy( - (res as any) as TestDepGraphResponse, // Double "as" required by Typescript for dodgy assertions + res as any as TestDepGraphResponse, // Double "as" required by Typescript for dodgy assertions depGraph, pkgManager, options, @@ -473,7 +475,7 @@ async function parseRes( res.filesystemPolicy = !!payloadPolicy; if (!options['ignore-policy']) { res.policy = res.policy || (payloadPolicy as string); - const policy = await snyk.policy.loadFromText(res.policy); + const policy = await snykPolicy.loadFromText(res.policy); res = policy.filter(res, root); } analytics.add('vulns', res.vulnerabilities.length); @@ -634,9 +636,9 @@ async function assembleLocalPayloads( if (!options.json && !options.quiet) { console.warn( chalk.bold.red( - `${icon.ISSUE} ${failedResults.length}/${failedResults.length + - deps.scannedProjects - .length} potential projects failed to get dependencies.`, + `${icon.ISSUE} ${failedResults.length}/${ + failedResults.length + deps.scannedProjects.length + } potential projects failed to get dependencies.`, ), ); failedResults.forEach((f) => { @@ -694,12 +696,10 @@ async function assembleLocalPayloads( // prefer dep-graph fallback on dep tree // TODO: clean up once dep-graphs only - const pkg: - | DepTree - | depGraphLib.DepGraph - | undefined = scannedProject.depGraph - ? 
scannedProject.depGraph - : scannedProject.depTree; + const pkg: DepTree | depGraphLib.DepGraph | undefined = + scannedProject.depGraph + ? scannedProject.depGraph + : scannedProject.depTree; if (options['print-deps']) { if (scannedProject.depGraph) { @@ -727,8 +727,11 @@ async function assembleLocalPayloads( } // todo: normalize what target file gets used across plugins and functions - const targetFile = - scannedProject.targetFile || deps.plugin.targetFile || options.file; + const targetFile = normalizeTargetFile( + scannedProject, + deps.plugin, + options.file, + ); // Forcing options.path to be a string as pathUtil requires is to be stringified const targetFileRelativePath = targetFile @@ -751,7 +754,7 @@ async function assembleLocalPayloads( options, // TODO: fix this and send only send when we used resolve-deps for node // it should be a ExpandedPkgTree type instead - pkg, + pkg as unknown as PackageExpanded, targetFileDir, ); @@ -888,8 +891,9 @@ async function assembleRemotePayloads(root, options): Promise { addPackageAnalytics(pkg.name, pkg.version); const encodedName = encodeURIComponent(pkg.name + '@' + pkg.version); // options.vulnEndpoint is only used by `snyk protect` (i.e. local filesystem tests) - const url = `${config.API}${options.vulnEndpoint || - `/vuln/${options.packageManager}`}/${encodedName}`; + const url = `${config.API}${ + options.vulnEndpoint || `/vuln/${options.packageManager}` + }/${encodedName}`; return [ { method: 'GET', diff --git a/src/lib/spinner.ts b/src/lib/spinner.ts index 2c1789fe56..bb0128716a 100644 --- a/src/lib/spinner.ts +++ b/src/lib/spinner.ts @@ -92,7 +92,7 @@ function createSpinner(opt: SpinnerOptions): Spinner | false { let delay = typeof opt.delay === 'number' ? 
opt.delay : 2; - const interval = (setInterval(() => { + const interval = setInterval(() => { if (--delay >= 0) { return; } @@ -100,7 +100,7 @@ function createSpinner(opt: SpinnerOptions): Spinner | false { const c = sprite[s]; str.write(c + ' ' + (opt.label || '') + CR); wrote = true; - }, ms) as unknown) as NodeJS.Timer; + }, ms) as unknown as NodeJS.Timer; const unref = typeof opt.unref === 'boolean' ? opt.unref : true; if (unref && typeof interval.unref === 'function') { diff --git a/src/lib/types.ts b/src/lib/types.ts index 6e40ca59a3..cc93fef924 100644 --- a/src/lib/types.ts +++ b/src/lib/types.ts @@ -16,6 +16,7 @@ export interface TestOptions { traverseNodeModules?: boolean; pruneRepeatedSubdependencies?: boolean; showVulnPaths: ShowVulnPaths; + maxVulnPaths?: number; failOn?: FailOn; initScript?: string; yarnWorkspaces?: boolean; diff --git a/test/acceptance/deepcode-fake-server.ts b/test/acceptance/deepcode-fake-server.ts index 86250c5a26..169b19dfb0 100644 --- a/test/acceptance/deepcode-fake-server.ts +++ b/test/acceptance/deepcode-fake-server.ts @@ -45,7 +45,9 @@ export const fakeDeepCodeServer = (): FakeDeepCodeServer => { }; const popRequest = () => { - return requests.pop()!; + const request = requests?.pop(); + if (request) return request; + else throw new Error('No request found in requests array'); }; const popRequests = (num: number) => { @@ -85,6 +87,7 @@ export const fakeDeepCodeServer = (): FakeDeepCodeServer => { }; const app = express(); + app.use((req, res, next) => { requests.push(req); next(); @@ -179,7 +182,7 @@ export const fakeDeepCodeServer = (): FakeDeepCodeServer => { getRequests, popRequest, popRequests, - setCustomResponse: setCustomResponse, + setCustomResponse, setFiltersResponse, setSarifResponse, setNextResponse, diff --git a/test/acceptance/fake-server.ts b/test/acceptance/fake-server.ts index 9c3ba02284..3d3216956c 100644 --- a/test/acceptance/fake-server.ts +++ b/test/acceptance/fake-server.ts @@ -12,6 +12,7 @@ const 
featureFlagDefaults = (): Map => { return new Map([ ['cliFailFast', false], ['iacIntegratedExperience', false], + ['iacNewEngine', false], ['containerCliAppVulnsEnabled', true], ['enablePnpmCli', false], ]); @@ -91,7 +92,9 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { }; const popRequest = () => { - return requests.pop()!; + const request = requests?.pop(); + if (request) return request; + else throw new Error('No request found in requests array'); }; const popRequests = (num: number) => { @@ -225,6 +228,89 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { } }); + // needed for code-client-go + app.get('/deeproxy/filters', (req, res) => { + res.status(200); + if (customResponse) { + res.send(customResponse); + } + res.send({}); + }); + + // needed for code-client-go + app.post('/deeproxy/bundle', (req, res) => { + res.status(200); + res.send({ + bundleHash: + 'faa6b7161c14f933ef4ca79a18ad9283eab362d5e6d3a977125eb95b37c377d8', + missingFiles: [], + }); + }); + + // needed for code-client-go + app.post(`/api/rest/orgs/:orgId/scans`, (req, res) => { + res.status(201); + res.send({ data: { id: 'a6fb2742-b67f-4dc3-bb27-42b67f1dc344' } }); + }); + + // needed for code-client-go + app.get(`/api/rest/orgs/:orgId/scans/:id`, (req, res) => { + res.status(200); + res.send({ + data: { + attributes: { + status: 'done', + components: [ + { findings_url: 'http://localhost:12345/api/code_mock_stream' }, + ], + }, + id: 'a6fb2742-b67f-4dc3-bb27-42b67f1dc344', + }, + }); + }); + + app.get(`/api/code_mock_stream`, (req, res) => { + res.status(200); + res.send({ + $schema: + 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json', + version: '2.1.0', + runs: [ + { + tool: { + driver: { + name: 'SnykCode', + semanticVersion: '1.0.0', + version: '1.0.0', + rules: [], + }, + }, + results: [ + { + ruleId: 'javascript/DisablePoweredBy', + ruleIndex: 1, + level: 'warning', + }, + 
], + properties: { + coverage: [ + { + files: 8, + isSupported: true, + lang: 'JavaScript', + }, + { + files: 1, + isSupported: true, + lang: 'HTML', + }, + ], + }, + }, + ], + }); + }); + app.post(basePath + '/vuln/:registry', (req, res, next) => { const vulnerabilities = []; if (req.query.org && req.query.org === 'missing-org') { @@ -404,8 +490,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { isMonitored: true, trialStarted: true, licensesPolicy: {}, - uri: - 'http://example-url/project/project-public-id/history/snapshot-public-id', + uri: 'http://example-url/project/project-public-id/history/snapshot-public-id', projectName: 'test-project', }); }); @@ -611,6 +696,14 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { }, ); + // needed for code-client-go + app.post( + basePath.replace('v1', 'hidden') + `/orgs/:orgId/workspaces`, + (req, res) => { + res.status(201).send({}); + }, + ); + app.post(`/rest/orgs/:orgId/sbom_tests`, (req, res) => { let testId = '4b341b8a-4697-4e35-928b-4b9ae37f8ea8'; @@ -627,8 +720,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { version: '1.0', }, links: { - self: - '/rest/orgs/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/sbom_tests?version=2023-08-31~beta', + self: '/rest/orgs/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/sbom_tests?version=2023-08-31~beta', related: '/rest/orgs/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/sbom_tests/4b341b8a-4697-4e35-928b-4b9ae37f8ea8?version=2023-08-31~beta', }, @@ -670,8 +762,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { }, }, links: { - self: - '/rest/orgs/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/sbom_tests/4b341b8a-4697-4e35-928b-4b9ae37f8ea8?version=2023-08-31~beta', + self: '/rest/orgs/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/sbom_tests/4b341b8a-4697-4e35-928b-4b9ae37f8ea8?version=2023-08-31~beta', related: 
'/rest/orgs/aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee/sbom_tests/4b341b8a-4697-4e35-928b-4b9ae37f8ea8/results?version=2023-08-31~beta', }, @@ -782,6 +873,20 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { }, }; break; + case 'cyclonedx1.6+json': + bom = { + specVersion: '1.6', + $schema: 'http://cyclonedx.org/schema/bom-1.6.schema.json', + components, + metadata: { + component: { name }, + tools: { + components: [...tools, { name: 'fake-server' }], + services: [{ name: 'fake-server', version: '42' }], + }, + }, + }; + break; } res.status(200).send(bom); @@ -886,7 +991,7 @@ export const fakeServer = (basePath: string, snykToken: string): FakeServer => { getRequests, popRequest, popRequests, - setCustomResponse: setCustomResponse, + setCustomResponse, setLocalCodeEngineConfiguration, setNextResponse, setNextStatusCode, diff --git a/test/acceptance/workspace-helper.ts b/test/acceptance/workspace-helper.ts index 792738ef7d..a689ec0f8f 100644 --- a/test/acceptance/workspace-helper.ts +++ b/test/acceptance/workspace-helper.ts @@ -3,6 +3,14 @@ import { readFileSync } from 'fs'; const workspacePath = path.join(__dirname, 'workspaces'); +/** + * Changes the current working directory to the specified subdirectory within the workspace path. + * + * @param {string} subdir - The subdirectory to navigate to (optional). If not provided, the workspace path itself will be used. 
+ * + * @example + * chdirWorkspaces('project1'); // Changes the working directory to '${workspacePath}/project1' + */ export function chdirWorkspaces(subdir = '') { const dir = path.join(workspacePath, subdir); process.chdir(dir); diff --git a/test/acceptance/workspaces/mono-repo-nested/.gitignore b/test/acceptance/workspaces/mono-repo-nested/.gitignore new file mode 100644 index 0000000000..9c108a1068 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/.gitignore @@ -0,0 +1,2 @@ +target +project/target diff --git a/test/acceptance/workspaces/mono-repo-nested/README.md b/test/acceptance/workspaces/mono-repo-nested/README.md new file mode 100644 index 0000000000..4290b5add4 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/README.md @@ -0,0 +1,5 @@ +# Simple Monorepo + +This repository contains a number of different projects, both at the root and in directories. + +It is used as a simple test fixture for monorepo and multi-language support on [Snyk.io](https://snyk.io). As such, each "project" is merely the files needed to describe dependencies. 
diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/Gemfile b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/Gemfile new file mode 100644 index 0000000000..8827060912 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/Gemfile @@ -0,0 +1,5 @@ +source "https://rubygems.org" + +gem "rack-cache", "~> 1.1.0" +gem "rack", "~> 1.6.2" +gem "rack-protection", "~> 1.5.0" diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/Gemfile.lock b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/Gemfile.lock new file mode 100644 index 0000000000..f8cd4503f8 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/Gemfile.lock @@ -0,0 +1,19 @@ +GEM + remote: https://rubygems.org/ + specs: + rack (1.6.5) + rack-cache (1.1) + rack (>= 0.4) + rack-protection (1.5.3) + rack + +PLATFORMS + ruby + +DEPENDENCIES + rack (~> 1.6.2) + rack-cache (~> 1.1.0) + rack-protection (~> 1.5.0) + +BUNDLED WITH + 1.14.3 diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/Gemfile b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/Gemfile new file mode 100644 index 0000000000..8827060912 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/Gemfile @@ -0,0 +1,5 @@ +source "https://rubygems.org" + +gem "rack-cache", "~> 1.1.0" +gem "rack", "~> 1.6.2" +gem "rack-protection", "~> 1.5.0" diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/Gemfile.lock b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/Gemfile.lock new file mode 100644 index 0000000000..f8cd4503f8 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/Gemfile.lock @@ -0,0 +1,19 @@ +GEM + remote: https://rubygems.org/ + specs: + rack (1.6.5) + rack-cache (1.1) + rack (>= 0.4) + rack-protection (1.5.3) + rack + +PLATFORMS + 
ruby + +DEPENDENCIES + rack (~> 1.6.2) + rack-cache (~> 1.1.0) + rack-protection (~> 1.5.0) + +BUNDLED WITH + 1.14.3 diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/level-6/Gemfile b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/level-6/Gemfile new file mode 100644 index 0000000000..eaaf55a49f --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/level-6/Gemfile @@ -0,0 +1,6 @@ +source :rubygems + +gem "sinatra" +gem "haml" +gem "httparty" +gem "actionpack" diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/level-6/Gemfile.lock b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/level-6/Gemfile.lock new file mode 100644 index 0000000000..c204545ac4 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/level-6/Gemfile.lock @@ -0,0 +1,71 @@ +GEM + remote: http://rubygems.org/ + specs: + actionpack (4.2.5) + actionview (= 4.2.5) + activesupport (= 4.2.5) + rack (~> 1.6) + rack-test (~> 0.6.2) + rails-dom-testing (~> 1.0, >= 1.0.5) + rails-html-sanitizer (~> 1.0, >= 1.0.2) + actionview (4.2.5) + activesupport (= 4.2.5) + builder (~> 3.1) + erubis (~> 2.7.0) + rails-dom-testing (~> 1.0, >= 1.0.5) + rails-html-sanitizer (~> 1.0, >= 1.0.2) + activesupport (4.2.5) + i18n (~> 0.7) + json (~> 1.7, >= 1.7.7) + minitest (~> 5.1) + thread_safe (~> 0.3, >= 0.3.4) + tzinfo (~> 1.1) + builder (3.2.2) + erubis (2.7.0) + haml (3.1.4) + httparty (0.8.1) + multi_json + multi_xml + i18n (0.7.0) + json (1.8.3) + loofah (2.0.3) + nokogiri (>= 1.5.9) + mini_portile2 (2.1.0) + minitest (5.9.1) + multi_json (1.12.1) + multi_xml (0.5.5) + nokogiri (1.6.8.1) + mini_portile2 (~> 2.1.0) + rack (1.6.4) + rack-protection (1.5.3) + rack + rack-test (0.6.3) + rack (>= 1.0) + rails-deprecated_sanitizer (1.0.3) + activesupport (>= 
4.2.0.alpha) + rails-dom-testing (1.0.7) + activesupport (>= 4.2.0.beta, < 5.0) + nokogiri (~> 1.6.0) + rails-deprecated_sanitizer (>= 1.0.1) + rails-html-sanitizer (1.0.3) + loofah (~> 2.0) + sinatra (1.3.2) + rack (~> 1.3, >= 1.3.6) + rack-protection (~> 1.2) + tilt (~> 1.3, >= 1.3.3) + thread_safe (0.3.5) + tilt (1.4.1) + tzinfo (1.2.2) + thread_safe (~> 0.1) + +PLATFORMS + ruby + +DEPENDENCIES + actionpack + haml + httparty + sinatra + +BUNDLED WITH + 1.13.2 diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/package-lock.json b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/package-lock.json new file mode 100644 index 0000000000..fce7bf7f28 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/package-lock.json @@ -0,0 +1,18 @@ +{ + "name": "shallow-goof", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "node-uuid": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.0.tgz", + "integrity": "sha1-B/myM3Vy/2J1x3Xh1IUT86RdemU=" + }, + "qs": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.0.6.tgz", + "integrity": "sha1-SBZZt+W/al6omAEN5a7TXrRp4SQ=" + } + } +} diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/package.json b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/package.json new file mode 100644 index 0000000000..4b2ecd8d3d --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/level-4/level-5/package.json @@ -0,0 +1,14 @@ +{ + "name": "shallow-goof", + "version": "0.0.1", + "description": "A vulnerable demo application", + "homepage": "https://snyk.io/", + "repository": { + "type": "git", + "url": "https://github.com/Snyk/shallow-goof" + }, + "dependencies": { + "qs": "0.0.6", + "node-uuid": "1.4.0" + } +} 
diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/package-lock.json b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/package-lock.json new file mode 100644 index 0000000000..fce7bf7f28 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/package-lock.json @@ -0,0 +1,18 @@ +{ + "name": "shallow-goof", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "node-uuid": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.0.tgz", + "integrity": "sha1-B/myM3Vy/2J1x3Xh1IUT86RdemU=" + }, + "qs": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.0.6.tgz", + "integrity": "sha1-SBZZt+W/al6omAEN5a7TXrRp4SQ=" + } + } +} diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/package.json b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/package.json new file mode 100644 index 0000000000..4b2ecd8d3d --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/level-2/level-3/package.json @@ -0,0 +1,14 @@ +{ + "name": "shallow-goof", + "version": "0.0.1", + "description": "A vulnerable demo application", + "homepage": "https://snyk.io/", + "repository": { + "type": "git", + "url": "https://github.com/Snyk/shallow-goof" + }, + "dependencies": { + "qs": "0.0.6", + "node-uuid": "1.4.0" + } +} diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/node-uuid/README.md b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/node-uuid/README.md new file mode 100644 index 0000000000..d62f7a14d4 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/node-uuid/README.md @@ -0,0 +1,203 @@ +# node-uuid + +Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. + +Features: + +* Generate RFC4122 version 1 or version 4 UUIDs +* Runs in node.js and all browsers. 
+* Cryptographically strong random # generation on supporting platforms +* 1.1K minified and gzip'ed (Want something smaller? Check this [crazy shit](https://gist.github.com/982883) out! ) +* [Annotated source code](http://broofa.github.com/node-uuid/docs/uuid.html) + +## Getting Started + +Install it in your browser: + +```html + +``` + +Or in node.js: + +``` +npm install node-uuid +``` + +```javascript +var uuid = require('node-uuid'); +``` + +Then create some ids ... + +```javascript +// Generate a v1 (time-based) id +uuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' + +// Generate a v4 (random) id +uuid.v4(); // -> '110ec58a-a0f2-4ac4-8393-c866d813b8d1' +``` + +## API + +### uuid.v1([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v1 (timestamp-based) UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. + * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. Default: An internally maintained clockseq is used. + * `msecs` - (Number | Date) Time in milliseconds since unix Epoch. Default: The current time is used. + * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Notes: + +1. The randomly generated node id is only guaranteed to stay constant for the lifetime of the current JS runtime. (Future versions of this module may use persistent storage mechanisms to extend this guarantee.) 
+ +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v1({ + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678 +}); // -> "710b962e-041c-11e1-9234-0123456789ab" +``` + +Example: In-place generation of two binary IDs + +```javascript +// Generate two ids in an array +var arr = new Array(32); // -> [] +uuid.v1(null, arr, 0); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15] +uuid.v1(null, arr, 16); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15 02 a3 1c b0 14 32 11 e1 85 58 0b 48 8e 4f c1 15] + +// Optionally use uuid.unparse() to get stringify the ids +uuid.unparse(buffer); // -> '02a2ce90-1432-11e1-8558-0b488e4fc115' +uuid.unparse(buffer, 16) // -> '02a31cb0-1432-11e1-8558-0b488e4fc115' +``` + +### uuid.v4([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v4 UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values + * `rng` - (Function) Random # generator to use. Set to one of the built-in generators - `uuid.mathRNG` (all platforms), `uuid.nodeRNG` (node.js only), `uuid.whatwgRNG` (WebKit only) - or a custom function that returns an array[16] of byte values. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. 
+ +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v4({ + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, + 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 + ] +}); +// -> "109156be-c4fb-41ea-b1b4-efe1671c5836" +``` + +Example: Generate two IDs in a single buffer + +```javascript +var buffer = new Array(32); // (or 'new Buffer' in node.js) +uuid.v4(null, buffer, 0); +uuid.v4(null, buffer, 16); +``` + +### uuid.parse(id[, buffer[, offset]]) +### uuid.unparse(buffer[, offset]) + +Parse and unparse UUIDs + + * `id` - (String) UUID(-like) string + * `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. Default: A new Array or Buffer is used + * `offset` - (Number) Starting index in `buffer` at which to begin writing. Default: 0 + +Example parsing and unparsing a UUID string + +```javascript +var bytes = uuid.parse('797ff043-11eb-11e1-80d6-510998755d10'); // -> +var string = uuid.unparse(bytes); // -> '797ff043-11eb-11e1-80d6-510998755d10' +``` + +### uuid.noConflict() + +(Browsers only) Set `uuid` property back to it's previous value. + +Returns the node-uuid object. + +Example: + +```javascript +var myUuid = uuid.noConflict(); +myUuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' +``` + +## Deprecated APIs + +Support for the following v1.2 APIs is available in v1.3, but is deprecated and will be removed in the next major version. + +### uuid([format [, buffer [, offset]]]) + +uuid() has become uuid.v4(), and the `format` argument is now implicit in the `buffer` argument. (i.e. if you specify a buffer, the format is assumed to be binary). + +### uuid.BufferClass + +The class of container created when generating binary uuid data if no buffer argument is specified. This is expected to go away, with no replacement API. 
+ +## Testing + +In node.js + +``` +> cd test +> node test.js +``` + +In Browser + +``` +open test/test.html +``` + +### Benchmarking + +Requires node.js + +``` +npm install uuid uuid-js +node benchmark/benchmark.js +``` + +For a more complete discussion of node-uuid performance, please see the `benchmark/README.md` file, and the [benchmark wiki](https://github.com/broofa/node-uuid/wiki/Benchmark) + +For browser performance [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance). + +## Release notes + +v1.4 +* Improved module context detection +* Removed public RNG functions + +v1.3.2: +* Improve tests and handling of v1() options (Issue #24) +* Expose RNG option to allow for perf testing with different generators + +v1.3: +* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! +* Support for node.js crypto API +* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/node-uuid/package.json b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/node-uuid/package.json new file mode 100644 index 0000000000..fc6297d963 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/node-uuid/package.json @@ -0,0 +1,56 @@ +{ + "_from": "node-uuid@1.4.0", + "_id": "node-uuid@1.4.0", + "_inBundle": false, + "_integrity": "sha1-B/myM3Vy/2J1x3Xh1IUT86RdemU=", + "_location": "/node-uuid", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "node-uuid@1.4.0", + "name": "node-uuid", + "escapedName": "node-uuid", + "rawSpec": "1.4.0", + "saveSpec": null, + "fetchSpec": "1.4.0" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.0.tgz", + "_shasum": "07f9b2337572ff6275c775e1d48513f3a45d7a65", + "_spec": "node-uuid@1.4.0", + "_where": "/Users/orsagie/snyk-fixtures/monorepo-simple", + 
"author": { + "name": "Robert Kieffer", + "email": "robert@broofa.com" + }, + "bugs": { + "url": "https://github.com/broofa/node-uuid/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Christoph Tavan", + "email": "dev@tavan.de" + } + ], + "deprecated": "Use uuid module instead", + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "homepage": "https://github.com/broofa/node-uuid#readme", + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "lib": ".", + "main": "./uuid.js", + "name": "node-uuid", + "repository": { + "type": "git", + "url": "git+https://github.com/broofa/node-uuid.git" + }, + "url": "http://github.com/broofa/node-uuid", + "version": "1.4.0" +} diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/qs/Readme.md b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/qs/Readme.md new file mode 100644 index 0000000000..78cbe24bd4 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/qs/Readme.md @@ -0,0 +1,38 @@ + +# node-querystring + + query string parser for node supporting nesting, as it was removed from `0.3.x`, so this library provides the previous and commonly desired behaviour (and twice as fast). Used by [express](http://expressjs.com), [connect](http://senchalabs.github.com/connect) and others. 
+ +## Installation + + $ npm install qs + +## Examples + + require('querystring').parse('user[name][first]=tj&user[email]=tj'); + // => { user: { name: { first: 'tj' }}} + +## License + +(The MIT License) + +Copyright (c) 2010 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/qs/package.json b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/qs/package.json new file mode 100644 index 0000000000..a03148ff13 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/node_modules/qs/package.json @@ -0,0 +1,40 @@ +{ + "_from": "qs@0.0.6", + "_id": "qs@0.0.6", + "_inBundle": false, + "_integrity": "sha1-SBZZt+W/al6omAEN5a7TXrRp4SQ=", + "_location": "/qs", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "qs@0.0.6", + "name": "qs", + "escapedName": "qs", + "rawSpec": "0.0.6", + "saveSpec": null, + "fetchSpec": "0.0.6" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/qs/-/qs-0.0.6.tgz", + "_shasum": "481659b7e5bf6a5ea898010de5aed35eb469e124", + "_spec": "qs@0.0.6", + "_where": "/Users/orsagie/snyk-fixtures/monorepo-simple", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "querystring parser", + "engines": { + "node": "*" + }, + "main": "index", + "name": "qs", + "repository": {}, + "version": "0.0.6" +} diff --git a/test/acceptance/workspaces/mono-repo-nested/level-1/package.json b/test/acceptance/workspaces/mono-repo-nested/level-1/package.json new file mode 100644 index 0000000000..5211b26c8e --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/level-1/package.json @@ -0,0 +1,35 @@ +{ + "name": "goof", + "version": "0.0.3", + "description": "A vulnerable todo demo application", + "homepage": "https://snyk.io/", + "repository": { + "type": "git", + "url": "https://github.com/Snyk/snyk-todo-list-demo-app/" + }, + "scripts": { + "start": "node app.js", + "cleanup": "mongo express-todo --eval 'db.todos.remove({});'" + }, + "dependencies": { + "body-parser": "1.9.0", + "cookie-parser": "1.3.3", 
+ "ejs": "1.0.0", + "ejs-locals": "1.0.2", + "errorhandler": "1.2.0", + "express": "4.12.4", + "express-fileupload": "0.0.5", + "humanize-ms": "1.0.1", + "marked": "0.3.5", + "method-override": "latest", + "moment": "2.15.1", + "mongoose": "4.2.4", + "morgan": "latest", + "ms": "^0.7.1", + "npmconf": "0.0.24", + "optional": "^0.1.3", + "st": "0.2.4", + "stream-buffers": "^3.0.1", + "tap": "^5.7.0" + } +} diff --git a/test/acceptance/workspaces/mono-repo-nested/node_modules/node-uuid/README.md b/test/acceptance/workspaces/mono-repo-nested/node_modules/node-uuid/README.md new file mode 100644 index 0000000000..d62f7a14d4 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/node_modules/node-uuid/README.md @@ -0,0 +1,203 @@ +# node-uuid + +Simple, fast generation of [RFC4122](http://www.ietf.org/rfc/rfc4122.txt) UUIDS. + +Features: + +* Generate RFC4122 version 1 or version 4 UUIDs +* Runs in node.js and all browsers. +* Cryptographically strong random # generation on supporting platforms +* 1.1K minified and gzip'ed (Want something smaller? Check this [crazy shit](https://gist.github.com/982883) out! ) +* [Annotated source code](http://broofa.github.com/node-uuid/docs/uuid.html) + +## Getting Started + +Install it in your browser: + +```html + +``` + +Or in node.js: + +``` +npm install node-uuid +``` + +```javascript +var uuid = require('node-uuid'); +``` + +Then create some ids ... + +```javascript +// Generate a v1 (time-based) id +uuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' + +// Generate a v4 (random) id +uuid.v4(); // -> '110ec58a-a0f2-4ac4-8393-c866d813b8d1' +``` + +## API + +### uuid.v1([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v1 (timestamp-based) UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `node` - (Array) Node id as Array of 6 bytes (per 4.1.6). Default: Randomly generated ID. See note 1. + * `clockseq` - (Number between 0 - 0x3fff) RFC clock sequence. 
Default: An internally maintained clockseq is used. + * `msecs` - (Number | Date) Time in milliseconds since unix Epoch. Default: The current time is used. + * `nsecs` - (Number between 0-9999) additional time, in 100-nanosecond units. Ignored if `msecs` is unspecified. Default: internal uuid counter is used, as per 4.2.1.2. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Notes: + +1. The randomly generated node id is only guaranteed to stay constant for the lifetime of the current JS runtime. (Future versions of this module may use persistent storage mechanisms to extend this guarantee.) + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v1({ + node: [0x01, 0x23, 0x45, 0x67, 0x89, 0xab], + clockseq: 0x1234, + msecs: new Date('2011-11-01').getTime(), + nsecs: 5678 +}); // -> "710b962e-041c-11e1-9234-0123456789ab" +``` + +Example: In-place generation of two binary IDs + +```javascript +// Generate two ids in an array +var arr = new Array(32); // -> [] +uuid.v1(null, arr, 0); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15] +uuid.v1(null, arr, 16); // -> [02 a2 ce 90 14 32 11 e1 85 58 0b 48 8e 4f c1 15 02 a3 1c b0 14 32 11 e1 85 58 0b 48 8e 4f c1 15] + +// Optionally use uuid.unparse() to get stringify the ids +uuid.unparse(buffer); // -> '02a2ce90-1432-11e1-8558-0b488e4fc115' +uuid.unparse(buffer, 16) // -> '02a31cb0-1432-11e1-8558-0b488e4fc115' +``` + +### uuid.v4([`options` [, `buffer` [, `offset`]]]) + +Generate and return a RFC4122 v4 UUID. + +* `options` - (Object) Optional uuid state to apply. Properties may include: + + * `random` - (Number[16]) Array of 16 numbers (0-255) to use in place of randomly generated values + * `rng` - (Function) Random # generator to use. 
Set to one of the built-in generators - `uuid.mathRNG` (all platforms), `uuid.nodeRNG` (node.js only), `uuid.whatwgRNG` (WebKit only) - or a custom function that returns an array[16] of byte values. + +* `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. +* `offset` - (Number) Starting index in `buffer` at which to begin writing. + +Returns `buffer`, if specified, otherwise the string form of the UUID + +Example: Generate string UUID with fully-specified options + +```javascript +uuid.v4({ + random: [ + 0x10, 0x91, 0x56, 0xbe, 0xc4, 0xfb, 0xc1, 0xea, + 0x71, 0xb4, 0xef, 0xe1, 0x67, 0x1c, 0x58, 0x36 + ] +}); +// -> "109156be-c4fb-41ea-b1b4-efe1671c5836" +``` + +Example: Generate two IDs in a single buffer + +```javascript +var buffer = new Array(32); // (or 'new Buffer' in node.js) +uuid.v4(null, buffer, 0); +uuid.v4(null, buffer, 16); +``` + +### uuid.parse(id[, buffer[, offset]]) +### uuid.unparse(buffer[, offset]) + +Parse and unparse UUIDs + + * `id` - (String) UUID(-like) string + * `buffer` - (Array | Buffer) Array or buffer where UUID bytes are to be written. Default: A new Array or Buffer is used + * `offset` - (Number) Starting index in `buffer` at which to begin writing. Default: 0 + +Example parsing and unparsing a UUID string + +```javascript +var bytes = uuid.parse('797ff043-11eb-11e1-80d6-510998755d10'); // -> +var string = uuid.unparse(bytes); // -> '797ff043-11eb-11e1-80d6-510998755d10' +``` + +### uuid.noConflict() + +(Browsers only) Set `uuid` property back to it's previous value. + +Returns the node-uuid object. + +Example: + +```javascript +var myUuid = uuid.noConflict(); +myUuid.v1(); // -> '6c84fb90-12c4-11e1-840d-7b25c5ee775a' +``` + +## Deprecated APIs + +Support for the following v1.2 APIs is available in v1.3, but is deprecated and will be removed in the next major version. 
+ +### uuid([format [, buffer [, offset]]]) + +uuid() has become uuid.v4(), and the `format` argument is now implicit in the `buffer` argument. (i.e. if you specify a buffer, the format is assumed to be binary). + +### uuid.BufferClass + +The class of container created when generating binary uuid data if no buffer argument is specified. This is expected to go away, with no replacement API. + +## Testing + +In node.js + +``` +> cd test +> node test.js +``` + +In Browser + +``` +open test/test.html +``` + +### Benchmarking + +Requires node.js + +``` +npm install uuid uuid-js +node benchmark/benchmark.js +``` + +For a more complete discussion of node-uuid performance, please see the `benchmark/README.md` file, and the [benchmark wiki](https://github.com/broofa/node-uuid/wiki/Benchmark) + +For browser performance [checkout the JSPerf tests](http://jsperf.com/node-uuid-performance). + +## Release notes + +v1.4 +* Improved module context detection +* Removed public RNG functions + +v1.3.2: +* Improve tests and handling of v1() options (Issue #24) +* Expose RNG option to allow for perf testing with different generators + +v1.3: +* Support for version 1 ids, thanks to [@ctavan](https://github.com/ctavan)! 
+* Support for node.js crypto API +* De-emphasizing performance in favor of a) cryptographic quality PRNGs where available and b) more manageable code diff --git a/test/acceptance/workspaces/mono-repo-nested/node_modules/node-uuid/package.json b/test/acceptance/workspaces/mono-repo-nested/node_modules/node-uuid/package.json new file mode 100644 index 0000000000..fc6297d963 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/node_modules/node-uuid/package.json @@ -0,0 +1,56 @@ +{ + "_from": "node-uuid@1.4.0", + "_id": "node-uuid@1.4.0", + "_inBundle": false, + "_integrity": "sha1-B/myM3Vy/2J1x3Xh1IUT86RdemU=", + "_location": "/node-uuid", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "node-uuid@1.4.0", + "name": "node-uuid", + "escapedName": "node-uuid", + "rawSpec": "1.4.0", + "saveSpec": null, + "fetchSpec": "1.4.0" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.0.tgz", + "_shasum": "07f9b2337572ff6275c775e1d48513f3a45d7a65", + "_spec": "node-uuid@1.4.0", + "_where": "/Users/orsagie/snyk-fixtures/monorepo-simple", + "author": { + "name": "Robert Kieffer", + "email": "robert@broofa.com" + }, + "bugs": { + "url": "https://github.com/broofa/node-uuid/issues" + }, + "bundleDependencies": false, + "contributors": [ + { + "name": "Christoph Tavan", + "email": "dev@tavan.de" + } + ], + "deprecated": "Use uuid module instead", + "description": "Rigorous implementation of RFC4122 (v1 and v4) UUIDs.", + "homepage": "https://github.com/broofa/node-uuid#readme", + "keywords": [ + "uuid", + "guid", + "rfc4122" + ], + "lib": ".", + "main": "./uuid.js", + "name": "node-uuid", + "repository": { + "type": "git", + "url": "git+https://github.com/broofa/node-uuid.git" + }, + "url": "http://github.com/broofa/node-uuid", + "version": "1.4.0" +} diff --git a/test/acceptance/workspaces/mono-repo-nested/node_modules/qs/Readme.md 
b/test/acceptance/workspaces/mono-repo-nested/node_modules/qs/Readme.md new file mode 100644 index 0000000000..78cbe24bd4 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/node_modules/qs/Readme.md @@ -0,0 +1,38 @@ + +# node-querystring + + query string parser for node supporting nesting, as it was removed from `0.3.x`, so this library provides the previous and commonly desired behaviour (and twice as fast). Used by [express](http://expressjs.com), [connect](http://senchalabs.github.com/connect) and others. + +## Installation + + $ npm install qs + +## Examples + + require('querystring').parse('user[name][first]=tj&user[email]=tj'); + // => { user: { name: { first: 'tj' }}} + +## License + +(The MIT License) + +Copyright (c) 2010 TJ Holowaychuk <tj@vision-media.ca> + +Permission is hereby granted, free of charge, to any person obtaining +a copy of this software and associated documentation files (the +'Software'), to deal in the Software without restriction, including +without limitation the rights to use, copy, modify, merge, publish, +distribute, sublicense, and/or sell copies of the Software, and to +permit persons to whom the Software is furnished to do so, subject to +the following conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
\ No newline at end of file diff --git a/test/acceptance/workspaces/mono-repo-nested/node_modules/qs/package.json b/test/acceptance/workspaces/mono-repo-nested/node_modules/qs/package.json new file mode 100644 index 0000000000..a03148ff13 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/node_modules/qs/package.json @@ -0,0 +1,40 @@ +{ + "_from": "qs@0.0.6", + "_id": "qs@0.0.6", + "_inBundle": false, + "_integrity": "sha1-SBZZt+W/al6omAEN5a7TXrRp4SQ=", + "_location": "/qs", + "_phantomChildren": {}, + "_requested": { + "type": "version", + "registry": true, + "raw": "qs@0.0.6", + "name": "qs", + "escapedName": "qs", + "rawSpec": "0.0.6", + "saveSpec": null, + "fetchSpec": "0.0.6" + }, + "_requiredBy": [ + "/" + ], + "_resolved": "https://registry.npmjs.org/qs/-/qs-0.0.6.tgz", + "_shasum": "481659b7e5bf6a5ea898010de5aed35eb469e124", + "_spec": "qs@0.0.6", + "_where": "/Users/orsagie/snyk-fixtures/monorepo-simple", + "author": { + "name": "TJ Holowaychuk", + "email": "tj@vision-media.ca", + "url": "http://tjholowaychuk.com" + }, + "bundleDependencies": false, + "deprecated": false, + "description": "querystring parser", + "engines": { + "node": "*" + }, + "main": "index", + "name": "qs", + "repository": {}, + "version": "0.0.6" +} diff --git a/test/acceptance/workspaces/mono-repo-nested/package-lock.json b/test/acceptance/workspaces/mono-repo-nested/package-lock.json new file mode 100644 index 0000000000..fce7bf7f28 --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/package-lock.json @@ -0,0 +1,18 @@ +{ + "name": "shallow-goof", + "version": "0.0.1", + "lockfileVersion": 1, + "requires": true, + "dependencies": { + "node-uuid": { + "version": "1.4.0", + "resolved": "https://registry.npmjs.org/node-uuid/-/node-uuid-1.4.0.tgz", + "integrity": "sha1-B/myM3Vy/2J1x3Xh1IUT86RdemU=" + }, + "qs": { + "version": "0.0.6", + "resolved": "https://registry.npmjs.org/qs/-/qs-0.0.6.tgz", + "integrity": "sha1-SBZZt+W/al6omAEN5a7TXrRp4SQ=" + } + } +} diff 
--git a/test/acceptance/workspaces/mono-repo-nested/package.json b/test/acceptance/workspaces/mono-repo-nested/package.json new file mode 100644 index 0000000000..4b2ecd8d3d --- /dev/null +++ b/test/acceptance/workspaces/mono-repo-nested/package.json @@ -0,0 +1,14 @@ +{ + "name": "shallow-goof", + "version": "0.0.1", + "description": "A vulnerable demo application", + "homepage": "https://snyk.io/", + "repository": { + "type": "git", + "url": "https://github.com/Snyk/shallow-goof" + }, + "dependencies": { + "qs": "0.0.6", + "node-uuid": "1.4.0" + } +} diff --git a/test/acceptance/workspaces/npm-package-policy/.snyk b/test/acceptance/workspaces/npm-package-policy/.snyk index 4b3ffdbe64..46ef70e6b7 100644 --- a/test/acceptance/workspaces/npm-package-policy/.snyk +++ b/test/acceptance/workspaces/npm-package-policy/.snyk @@ -1,5 +1,5 @@ # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. -version: v1.25.0 +version: v1.25.1 # ignores vulnerabilities until expiry date; change duration by modifying expiry date ignore: 'npm:marked:20170907': diff --git a/test/acceptance/workspaces/npm-package-policy/custom-location/.snyk b/test/acceptance/workspaces/npm-package-policy/custom-location/.snyk index 3cb730509e..32c76f7837 100644 --- a/test/acceptance/workspaces/npm-package-policy/custom-location/.snyk +++ b/test/acceptance/workspaces/npm-package-policy/custom-location/.snyk @@ -1,5 +1,5 @@ # Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. 
-version: v1.25.0 +version: v1.25.1 # ignores vulnerabilities until expiry date; change duration by modifying expiry date ignore: 'npm:marked:20170907': diff --git a/test/fixtures/sast/no-vulnerabilities/index.ts b/test/fixtures/sast/no-vulnerabilities/index.ts new file mode 100644 index 0000000000..e028b461e0 --- /dev/null +++ b/test/fixtures/sast/no-vulnerabilities/index.ts @@ -0,0 +1,2 @@ +const message: string = 'Hello, World!'; +console.log(message); \ No newline at end of file diff --git a/test/fixtures/sast/with_code_issues/HashingAssignment.java b/test/fixtures/sast/with_code_issues/HashingAssignment.java new file mode 100644 index 0000000000..b5dc69fdcd --- /dev/null +++ b/test/fixtures/sast/with_code_issues/HashingAssignment.java @@ -0,0 +1,111 @@ +/* + * This file is part of WebGoat, an Open Web Application Security Project utility. For details, please see http://www.owasp.org/ + * + * Copyright (c) 2002 - 2019 Bruce Mayhew + * + * This program is free software; you can redistribute it and/or modify it under the terms of the + * GNU General Public License as published by the Free Software Foundation; either version 2 of the + * License, or (at your option) any later version. + * + * This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without + * even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU + * General Public License for more details. + * + * You should have received a copy of the GNU General Public License along with this program; if + * not, write to the Free Software Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA + * 02111-1307, USA. + * + * Getting Source ============== + * + * Source for this application is maintained at https://github.com/WebGoat/WebGoat, a repository for free software projects. 
+ */ + +package org.owasp.webgoat.crypto; + +import java.security.MessageDigest; +import java.security.NoSuchAlgorithmException; +import java.util.Random; + +import javax.servlet.http.HttpServletRequest; +import javax.xml.bind.DatatypeConverter; + +import org.owasp.webgoat.assignments.AssignmentEndpoint; +import org.owasp.webgoat.assignments.AssignmentHints; +import org.owasp.webgoat.assignments.AttackResult; +import org.springframework.http.MediaType; +import org.springframework.web.bind.annotation.PostMapping; +import org.springframework.web.bind.annotation.RequestMapping; +import org.springframework.web.bind.annotation.RequestParam; +import org.springframework.web.bind.annotation.ResponseBody; +import org.springframework.web.bind.annotation.RestController; + +@RestController +@AssignmentHints({"crypto-hashing.hints.1","crypto-hashing.hints.2"}) +public class HashingAssignment extends AssignmentEndpoint { + + public static final String[] SECRETS = {"secret","admin","password", "123456", "passw0rd"}; + + @RequestMapping(path="/crypto/hashing/md5",produces=MediaType.TEXT_HTML_VALUE) + @ResponseBody + public String getMd5(HttpServletRequest request) throws NoSuchAlgorithmException { + + String md5Hash = (String) request.getSession().getAttribute("md5Hash"); + if (md5Hash == null) { + + String secret = SECRETS[new Random().nextInt(SECRETS.length)]; + + MessageDigest md = MessageDigest.getInstance("MD5"); + md.update(secret.getBytes()); + byte[] digest = md.digest(); + md5Hash = DatatypeConverter + .printHexBinary(digest).toUpperCase(); + request.getSession().setAttribute("md5Hash", md5Hash); + request.getSession().setAttribute("md5Secret", secret); + } + return md5Hash; + } + + @RequestMapping(path="/crypto/hashing/sha256",produces=MediaType.TEXT_HTML_VALUE) + @ResponseBody + public String getSha256(HttpServletRequest request) throws NoSuchAlgorithmException { + + String sha256 = (String) request.getSession().getAttribute("sha256"); + if (sha256 == null) { + String 
secret = SECRETS[new Random().nextInt(SECRETS.length)]; + sha256 = getHash(secret, "SHA-256"); + request.getSession().setAttribute("sha256Hash", sha256); + request.getSession().setAttribute("sha256Secret", secret); + } + return sha256; + } + + @PostMapping("/crypto/hashing") + @ResponseBody + public AttackResult completed(HttpServletRequest request, @RequestParam String answer_pwd1, @RequestParam String answer_pwd2) { + + String md5Secret = (String) request.getSession().getAttribute("md5Secret"); + String sha256Secret = (String) request.getSession().getAttribute("sha256Secret"); + + if (answer_pwd1!=null && answer_pwd2 !=null) { + if (answer_pwd1.equals(md5Secret) + && answer_pwd2.equals(sha256Secret)) { + return success(this) + .feedback("crypto-hashing.success") + .build(); + } else if (answer_pwd1.equals(md5Secret) + || answer_pwd2.equals(sha256Secret)) { + return failed(this).feedback("crypto-hashing.oneok").build(); + } + } + return failed(this).feedback("crypto-hashing.empty").build(); + } + + public static String getHash(String secret, String algorithm) throws NoSuchAlgorithmException { + MessageDigest md = MessageDigest.getInstance(algorithm); + md.update(secret.getBytes()); + byte[] digest = md.digest(); + return DatatypeConverter + .printHexBinary(digest).toUpperCase(); + } + +} diff --git a/test/fixtures/snyk-code/sarif-schema.json b/test/fixtures/snyk-code/sarif-schema.json new file mode 100644 index 0000000000..97c4c5924d --- /dev/null +++ b/test/fixtures/snyk-code/sarif-schema.json @@ -0,0 +1,3310 @@ +{ + "type": "object", + "properties": { + "$schema": { + "description": "The URI of the JSON schema corresponding to the version.", + "type": "string", + "format": "uri" + }, + + "version": { + "description": "The SARIF format version of this log file.", + "enum": ["2.1.0"], + "type": "string" + }, + + "runs": { + "description": "The set of runs contained in this log file.", + "type": ["array", "null"], + "minItems": 0, + "uniqueItems": false, + "items": 
{ + "$ref": "#/definitions/run" + } + }, + + "inlineExternalProperties": { + "description": "References to external property files that share data between runs.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/externalProperties" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the log file.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["version", "runs"], + + "definitions": { + "address": { + "description": "A physical or virtual address, or a range of addresses, in an 'addressable region' (memory or a binary file).", + "additionalProperties": false, + "type": "object", + "properties": { + "absoluteAddress": { + "description": "The address expressed as a byte offset from the start of the addressable region.", + "type": "integer", + "minimum": -1, + "default": -1 + }, + + "relativeAddress": { + "description": "The address expressed as a byte offset from the absolute address of the top-most parent object.", + "type": "integer" + }, + + "length": { + "description": "The number of bytes in this range of addresses.", + "type": "integer" + }, + + "kind": { + "description": "An open-ended string that identifies the address kind. 
'data', 'function', 'header','instruction', 'module', 'page', 'section', 'segment', 'stack', 'stackFrame', 'table' are well-known values.", + "type": "string" + }, + + "name": { + "description": "A name that is associated with the address, e.g., '.text'.", + "type": "string" + }, + + "fullyQualifiedName": { + "description": "A human-readable fully qualified name that is associated with the address.", + "type": "string" + }, + + "offsetFromParent": { + "description": "The byte offset of this address from the absolute or relative address of the parent object.", + "type": "integer" + }, + + "index": { + "description": "The index within run.addresses of the cached object for this address.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "parentIndex": { + "description": "The index within run.addresses of the parent object.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the address.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "artifact": { + "description": "A single artifact. 
In some cases, this artifact might be nested within another artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "description": { + "description": "A short description of the artifact.", + "$ref": "#/definitions/message" + }, + + "location": { + "description": "The location of the artifact.", + "$ref": "#/definitions/artifactLocation" + }, + + "parentIndex": { + "description": "Identifies the index of the immediate parent of the artifact, if this artifact is nested.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "offset": { + "description": "The offset in bytes of the artifact within its containing artifact.", + "type": "integer", + "minimum": 0 + }, + + "length": { + "description": "The length of the artifact in bytes.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "roles": { + "description": "The role or roles played by the artifact in the analysis.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "enum": [ + "analysisTarget", + "attachment", + "responseFile", + "resultFile", + "standardStream", + "tracedFile", + "unmodified", + "modified", + "added", + "deleted", + "renamed", + "uncontrolled", + "driver", + "extension", + "translation", + "taxonomy", + "policy", + "referencedOnCommandLine", + "memoryContents", + "directory", + "userSpecifiedConfiguration", + "toolSpecifiedConfiguration", + "debugOutputFile" + ], + "type": "string" + } + }, + + "mimeType": { + "description": "The MIME type (RFC 2045) of the artifact.", + "type": "string", + "pattern": "[^/]+/.+" + }, + + "contents": { + "description": "The contents of the artifact.", + "$ref": "#/definitions/artifactContent" + }, + + "encoding": { + "description": "Specifies the encoding for an artifact object that refers to a text file.", + "type": "string" + }, + + "sourceLanguage": { + "description": "Specifies the source language for any artifact object that refers to a text file that contains 
source code.", + "type": "string" + }, + + "hashes": { + "description": "A dictionary, each of whose keys is the name of a hash function and each of whose values is the hashed value of the artifact produced by the specified hash function.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "lastModifiedTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the artifact was most recently modified. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the artifact.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "artifactChange": { + "description": "A change to a single artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "artifactLocation": { + "description": "The location of the artifact to change.", + "$ref": "#/definitions/artifactLocation" + }, + + "replacements": { + "description": "An array of replacement objects, each of which represents the replacement of a single region in a single artifact specified by 'artifactLocation'.", + "type": "array", + "minItems": 1, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/replacement" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the change.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["artifactLocation", "replacements"] + }, + + "artifactContent": { + "description": "Represents the contents of an artifact.", + "type": "object", + "additionalProperties": false, + "properties": { + "text": { + "description": "UTF-8-encoded content from a text artifact.", + "type": "string" + }, + + "binary": { + "description": "MIME Base64-encoded content from a binary artifact, or from a text artifact in its original encoding.", + "type": "string" + }, + + "rendered": { + 
"description": "An alternate rendered representation of the artifact (e.g., a decompiled representation of a binary region).", + "$ref": "#/definitions/multiformatMessageString" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the artifact content.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "artifactLocation": { + "description": "Specifies the location of an artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "uri": { + "description": "A string containing a valid relative or absolute URI.", + "type": "string", + "format": "uri-reference" + }, + + "uriBaseId": { + "description": "A string which indirectly specifies the absolute URI with respect to which a relative URI in the \"uri\" property is interpreted.", + "type": "string" + }, + + "index": { + "description": "The index within the run artifacts array of the artifact object associated with the artifact location.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "description": { + "description": "A short description of the artifact location.", + "$ref": "#/definitions/message" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the artifact location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "attachment": { + "description": "An artifact relevant to a result.", + "type": "object", + "additionalProperties": false, + "properties": { + "description": { + "description": "A message describing the role played by the attachment.", + "$ref": "#/definitions/message" + }, + + "artifactLocation": { + "description": "The location of the attachment.", + "$ref": "#/definitions/artifactLocation" + }, + + "regions": { + "description": "An array of regions of interest within the attachment.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/region" + } + }, + + "rectangles": { + 
"description": "An array of rectangles specifying areas of interest within the image.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/rectangle" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the attachment.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["artifactLocation"] + }, + + "codeFlow": { + "description": "A set of threadFlows which together describe a pattern of code execution relevant to detecting a result.", + "additionalProperties": false, + "type": "object", + "properties": { + "message": { + "description": "A message relevant to the code flow.", + "$ref": "#/definitions/message" + }, + + "threadFlows": { + "description": "An array of one or more unique threadFlow objects, each of which describes the progress of a program through a thread of execution.", + "type": "array", + "minItems": 1, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/threadFlow" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the code flow.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["threadFlows"] + }, + + "configurationOverride": { + "description": "Information about how a specific rule or notification was reconfigured at runtime.", + "type": "object", + "additionalProperties": false, + "properties": { + "configuration": { + "description": "Specifies how the rule or notification was configured during the scan.", + "$ref": "#/definitions/reportingConfiguration" + }, + + "descriptor": { + "description": "A reference used to locate the descriptor whose configuration was overridden.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the configuration override.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["configuration", 
"descriptor"] + }, + + "conversion": { + "description": "Describes how a converter transformed the output of a static analysis tool from the analysis tool's native output format into the SARIF format.", + "additionalProperties": false, + "type": "object", + "properties": { + "tool": { + "description": "A tool object that describes the converter.", + "$ref": "#/definitions/tool" + }, + + "invocation": { + "description": "An invocation object that describes the invocation of the converter.", + "$ref": "#/definitions/invocation" + }, + + "analysisToolLogFiles": { + "description": "The locations of the analysis tool's per-run log files.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/artifactLocation" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the conversion.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["tool"] + }, + + "edge": { + "description": "Represents a directed edge in a graph.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "A string that uniquely identifies the edge within its graph.", + "type": "string" + }, + + "label": { + "description": "A short description of the edge.", + "$ref": "#/definitions/message" + }, + + "sourceNodeId": { + "description": "Identifies the source node (the node at which the edge starts).", + "type": "string" + }, + + "targetNodeId": { + "description": "Identifies the target node (the node at which the edge ends).", + "type": "string" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the edge.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["id", "sourceNodeId", "targetNodeId"] + }, + + "edgeTraversal": { + "description": "Represents the traversal of a single edge during a graph traversal.", + "type": "object", + "additionalProperties": false, + "properties": { + 
"edgeId": { + "description": "Identifies the edge being traversed.", + "type": "string" + }, + + "message": { + "description": "A message to display to the user as the edge is traversed.", + "$ref": "#/definitions/message" + }, + + "finalState": { + "description": "The values of relevant expressions after the edge has been traversed.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "stepOverEdgeCount": { + "description": "The number of edge traversals necessary to return from a nested graph.", + "type": "integer", + "minimum": 0 + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the edge traversal.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["edgeId"] + }, + + "exception": { + "description": "Describes a runtime exception encountered during the execution of an analysis tool.", + "type": "object", + "additionalProperties": false, + "properties": { + "kind": { + "type": "string", + "description": "A string that identifies the kind of exception, for example, the fully qualified type name of an object that was thrown, or the symbolic name of a signal." 
+ }, + + "message": { + "description": "A message that describes the exception.", + "type": "string" + }, + + "stack": { + "description": "The sequence of function calls leading to the exception.", + "$ref": "#/definitions/stack" + }, + + "innerExceptions": { + "description": "An array of exception objects each of which is considered a cause of this exception.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/exception" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the exception.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "externalProperties": { + "description": "The top-level element of an external property file.", + "type": "object", + "additionalProperties": false, + "properties": { + "schema": { + "description": "The URI of the JSON schema corresponding to the version of the external property file format.", + "type": "string", + "format": "uri" + }, + + "version": { + "description": "The SARIF format version of this external properties object.", + "enum": ["2.1.0"], + "type": "string" + }, + + "guid": { + "description": "A stable, unique identifier for this external properties object, in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "runGuid": { + "description": "A stable, unique identifier for the run associated with this external properties object, in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "conversion": { + "description": "A conversion object that will be merged with a separate run.", + "$ref": "#/definitions/conversion" + }, + + "graphs": { + "description": "An array of graph objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "default": [], + "uniqueItems": 
true, + "items": { + "$ref": "#/definitions/graph" + } + }, + + "externalizedProperties": { + "description": "Key/value pairs that provide additional information that will be merged with a separate run.", + "$ref": "#/definitions/propertyBag" + }, + + "artifacts": { + "description": "An array of artifact objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifact" + } + }, + + "invocations": { + "description": "Describes the invocation of the analysis tool that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/invocation" + } + }, + + "logicalLocations": { + "description": "An array of logical locations such as namespaces, types or functions that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/logicalLocation" + } + }, + + "threadFlowLocations": { + "description": "An array of threadFlowLocation objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/threadFlowLocation" + } + }, + + "results": { + "description": "An array of result objects that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/result" + } + }, + + "taxonomies": { + "description": "Tool taxonomies that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "driver": { + "description": "The analysis tool object that will be merged with a separate run.", + "$ref": "#/definitions/toolComponent" + }, + + "extensions": { + "description": "Tool extensions that will be merged 
with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "policies": { + "description": "Tool policies that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "translations": { + "description": "Tool translations that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "addresses": { + "description": "Addresses that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/address" + } + }, + + "webRequests": { + "description": "Requests that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/webRequest" + } + }, + + "webResponses": { + "description": "Responses that will be merged with a separate run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/webResponse" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the external properties.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "externalPropertyFileReference": { + "description": "Contains information that enables a SARIF consumer to locate the external property file that contains the value of an externalized property associated with the run.", + "type": "object", + "additionalProperties": false, + "properties": { + "location": { + "description": "The location of the external property file.", + "$ref": "#/definitions/artifactLocation" + }, + + "guid": { + "description": "A stable, unique identifier for the external property file in 
the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "itemCount": { + "description": "A non-negative integer specifying the number of items contained in the external property file.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the external property file.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [{ "required": ["location"] }, { "required": ["guid"] }] + }, + + "externalPropertyFileReferences": { + "description": "References to external property files that should be inlined with the content of a root log file.", + "additionalProperties": false, + "type": "object", + "properties": { + "conversion": { + "description": "An external property file containing a run.conversion object to be merged with the root log file.", + "$ref": "#/definitions/externalPropertyFileReference" + }, + + "graphs": { + "description": "An array of external property files containing a run.graphs object to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "externalizedProperties": { + "description": "An external property file containing a run.properties object to be merged with the root log file.", + "$ref": "#/definitions/externalPropertyFileReference" + }, + + "artifacts": { + "description": "An array of external property files containing run.artifacts arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "invocations": { + "description": "An array of external property files containing run.invocations arrays to be merged with the root log file.", + "type": "array", + 
"minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "logicalLocations": { + "description": "An array of external property files containing run.logicalLocations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "threadFlowLocations": { + "description": "An array of external property files containing run.threadFlowLocations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "results": { + "description": "An array of external property files containing run.results arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "taxonomies": { + "description": "An array of external property files containing run.taxonomies arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "addresses": { + "description": "An array of external property files containing run.addresses arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "driver": { + "description": "An external property file containing a run.driver object to be merged with the root log file.", + "$ref": "#/definitions/externalPropertyFileReference" + }, + + "extensions": { + "description": "An array of external property files containing run.extensions arrays to be merged with the root log file.", + 
"type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "policies": { + "description": "An array of external property files containing run.policies arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "translations": { + "description": "An array of external property files containing run.translations arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "webRequests": { + "description": "An array of external property files containing run.requests arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "webResponses": { + "description": "An array of external property files containing run.responses arrays to be merged with the root log file.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/externalPropertyFileReference" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the external property files.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "fix": { + "description": "A proposed fix for the problem represented by a result object. A fix specifies a set of artifacts to modify. 
For each artifact, it specifies a set of bytes to remove, and provides a set of new bytes to replace them.", + "additionalProperties": false, + "type": "object", + "properties": { + "description": { + "description": "A message that describes the proposed fix, enabling viewers to present the proposed change to an end user.", + "$ref": "#/definitions/message" + }, + + "artifactChanges": { + "description": "One or more artifact changes that comprise a fix for a result.", + "type": "array", + "minItems": 1, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifactChange" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the fix.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["artifactChanges"] + }, + + "graph": { + "description": "A network of nodes and directed edges that describes some aspect of the structure of the code (for example, a call graph).", + "type": "object", + "additionalProperties": false, + "properties": { + "description": { + "description": "A description of the graph.", + "$ref": "#/definitions/message" + }, + + "nodes": { + "description": "An array of node objects representing the nodes of the graph.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/node" + } + }, + + "edges": { + "description": "An array of edge objects representing the edges of the graph.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/edge" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the graph.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "graphTraversal": { + "description": "Represents a path through a graph.", + "type": "object", + "additionalProperties": false, + "properties": { + "runGraphIndex": { + "description": "The index within the run.graphs to be associated with the result.", + 
"type": "integer", + "default": -1, + "minimum": -1 + }, + + "resultGraphIndex": { + "description": "The index within the result.graphs to be associated with the result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "description": { + "description": "A description of this graph traversal.", + "$ref": "#/definitions/message" + }, + + "initialState": { + "description": "Values of relevant expressions at the start of the graph traversal that may change during graph traversal.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "immutableState": { + "description": "Values of relevant expressions at the start of the graph traversal that remain constant for the graph traversal.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "edgeTraversals": { + "description": "The sequences of edges traversed by this graph traversal.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/edgeTraversal" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the graph traversal.", + "$ref": "#/definitions/propertyBag" + } + }, + "oneOf": [ + { "required": ["runGraphIndex"] }, + { "required": ["resultGraphIndex"] } + ] + }, + + "invocation": { + "description": "The runtime environment of the analysis tool run.", + "additionalProperties": false, + "type": "object", + "properties": { + "commandLine": { + "description": "The command line used to invoke the tool.", + "type": "string" + }, + + "arguments": { + "description": "An array of strings, containing in order the command line arguments passed to the tool from the operating system.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "type": "string" + } + }, + + "responseFiles": { + "description": "The locations of any response files specified on the tool's 
command line.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifactLocation" + } + }, + + "startTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the invocation started. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + + "endTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the invocation ended. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + + "exitCode": { + "description": "The process exit code.", + "type": "integer" + }, + + "ruleConfigurationOverrides": { + "description": "An array of configurationOverride objects that describe rules related runtime overrides.", + "type": "array", + "minItems": 0, + "default": [], + "uniqueItems": true, + "items": { + "$ref": "#/definitions/configurationOverride" + } + }, + + "notificationConfigurationOverrides": { + "description": "An array of configurationOverride objects that describe notifications related runtime overrides.", + "type": "array", + "minItems": 0, + "default": [], + "uniqueItems": true, + "items": { + "$ref": "#/definitions/configurationOverride" + } + }, + + "toolExecutionNotifications": { + "description": "A list of runtime conditions detected by the tool during the analysis.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/notification" + } + }, + + "toolConfigurationNotifications": { + "description": "A list of conditions detected by the tool that are relevant to the tool's configuration.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/notification" + } + }, + + "exitCodeDescription": { + "description": "The reason for the process exit.", + "type": "string" + }, + + "exitSignalName": { + 
"description": "The name of the signal that caused the process to exit.", + "type": "string" + }, + + "exitSignalNumber": { + "description": "The numeric value of the signal that caused the process to exit.", + "type": "integer" + }, + + "processStartFailureMessage": { + "description": "The reason given by the operating system that the process failed to start.", + "type": "string" + }, + + "executionSuccessful": { + "description": "Specifies whether the tool's execution completed successfully.", + "type": "boolean" + }, + + "machine": { + "description": "The machine on which the invocation occurred.", + "type": "string" + }, + + "account": { + "description": "The account under which the invocation occurred.", + "type": "string" + }, + + "processId": { + "description": "The id of the process in which the invocation occurred.", + "type": "integer" + }, + + "executableLocation": { + "description": "An absolute URI specifying the location of the executable that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + + "workingDirectory": { + "description": "The working directory for the invocation.", + "$ref": "#/definitions/artifactLocation" + }, + + "environmentVariables": { + "description": "The environment variables associated with the analysis tool process, expressed as key/value pairs.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "stdin": { + "description": "A file containing the standard input stream to the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + + "stdout": { + "description": "A file containing the standard output stream from the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + + "stderr": { + "description": "A file containing the standard error stream from the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + + "stdoutStderr": { + "description": "A file containing the interleaved standard output and standard error stream 
from the process that was invoked.", + "$ref": "#/definitions/artifactLocation" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the invocation.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["executionSuccessful"] + }, + + "location": { + "description": "A location within a programming artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "id": { + "description": "Value that distinguishes this location from all other locations within a single result object.", + "type": "integer", + "minimum": -1, + "default": -1 + }, + + "physicalLocation": { + "description": "Identifies the artifact and region.", + "$ref": "#/definitions/physicalLocation" + }, + + "logicalLocations": { + "description": "The logical locations associated with the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/logicalLocation" + } + }, + + "message": { + "description": "A message relevant to the location.", + "$ref": "#/definitions/message" + }, + + "annotations": { + "description": "A set of regions relevant to the location.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/region" + } + }, + + "relationships": { + "description": "An array of objects that describe relationships between this location and others.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/locationRelationship" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "locationRelationship": { + "description": "Information about the relation of one location to another.", + "type": "object", + "additionalProperties": false, + "properties": { + "target": { + "description": "A reference to the related 
location.", + "type": "integer", + "minimum": 0 + }, + + "kinds": { + "description": "A set of distinct strings that categorize the relationship. Well-known kinds include 'includes', 'isIncludedBy' and 'relevant'.", + "type": "array", + "default": ["relevant"], + "uniqueItems": true, + "items": { + "type": "string" + } + }, + + "description": { + "description": "A description of the location relationship.", + "$ref": "#/definitions/message" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the location relationship.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["target"] + }, + + "logicalLocation": { + "description": "A logical location of a construct that produced a result.", + "additionalProperties": false, + "type": "object", + "properties": { + "name": { + "description": "Identifies the construct in which the result occurred. For example, this property might contain the name of a class or a method.", + "type": "string" + }, + + "index": { + "description": "The index within the logical locations array.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "fullyQualifiedName": { + "description": "The human-readable fully qualified name of the logical location.", + "type": "string" + }, + + "decoratedName": { + "description": "The machine-readable name for the logical location, such as a mangled function name provided by a C++ compiler that encodes calling convention, return type and other details along with the function name.", + "type": "string" + }, + + "parentIndex": { + "description": "Identifies the index of the immediate parent of the construct in which the result was detected. For example, this property might point to a logical location that represents the namespace that holds a type.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "kind": { + "description": "The type of construct this logical location component refers to. 
Should be one of 'function', 'member', 'module', 'namespace', 'parameter', 'resource', 'returnType', 'type', 'variable', 'object', 'array', 'property', 'value', 'element', 'text', 'attribute', 'comment', 'declaration', 'dtd' or 'processingInstruction', if any of those accurately describe the construct.", + "type": "string" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the logical location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "message": { + "description": "Encapsulates a message intended to be read by the end user.", + "type": "object", + "additionalProperties": false, + + "properties": { + "text": { + "description": "A plain text message string.", + "type": "string" + }, + + "markdown": { + "description": "A Markdown message string.", + "type": "string" + }, + + "id": { + "description": "The identifier for this message.", + "type": "string" + }, + + "arguments": { + "description": "An array of strings to substitute into the message string.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "type": "string" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the message.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [{ "required": ["text"] }, { "required": ["id"] }] + }, + + "multiformatMessageString": { + "description": "A message string or message format string rendered in multiple formats.", + "type": "object", + "additionalProperties": false, + + "properties": { + "text": { + "description": "A plain text message string or format string.", + "type": "string" + }, + + "markdown": { + "description": "A Markdown message string or format string.", + "type": "string" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the message.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["text"] + }, + + "node": { + "description": 
"Represents a node in a graph.", + "type": "object", + "additionalProperties": false, + + "properties": { + "id": { + "description": "A string that uniquely identifies the node within its graph.", + "type": "string" + }, + + "label": { + "description": "A short description of the node.", + "$ref": "#/definitions/message" + }, + + "location": { + "description": "A code location associated with the node.", + "$ref": "#/definitions/location" + }, + + "children": { + "description": "Array of child nodes.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/node" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the node.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["id"] + }, + + "notification": { + "description": "Describes a condition relevant to the tool itself, as opposed to being relevant to a target being analyzed by the tool.", + "type": "object", + "additionalProperties": false, + "properties": { + "locations": { + "description": "The locations relevant to this notification.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/location" + } + }, + + "message": { + "description": "A message that describes the condition that was encountered.", + "$ref": "#/definitions/message" + }, + + "level": { + "description": "A value specifying the severity level of the notification.", + "default": "warning", + "enum": ["none", "note", "warning", "error"], + "type": "string" + }, + + "threadId": { + "description": "The thread identifier of the code that generated the notification.", + "type": "integer" + }, + + "timeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the analysis tool generated the notification.", + "type": "string", + "format": "date-time" + }, + + "exception": { + "description": "The runtime exception, if any, relevant to 
this notification.", + "$ref": "#/definitions/exception" + }, + + "descriptor": { + "description": "A reference used to locate the descriptor relevant to this notification.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + + "associatedRule": { + "description": "A reference used to locate the rule descriptor associated with this notification.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the notification.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["message"] + }, + + "physicalLocation": { + "description": "A physical location relevant to a result. Specifies a reference to a programming artifact together with a range of bytes or characters within that artifact.", + "additionalProperties": false, + "type": "object", + "properties": { + "address": { + "description": "The address of the location.", + "$ref": "#/definitions/address" + }, + + "artifactLocation": { + "description": "The location of the artifact.", + "$ref": "#/definitions/artifactLocation" + }, + + "region": { + "description": "Specifies a portion of the artifact.", + "$ref": "#/definitions/region" + }, + + "contextRegion": { + "description": "Specifies a portion of the artifact that encloses the region. 
Allows a viewer to display additional context around the region.", + "$ref": "#/definitions/region" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the physical location.", + "$ref": "#/definitions/propertyBag" + } + }, + + "anyOf": [ + { + "required": ["address"] + }, + { + "required": ["artifactLocation"] + } + ] + }, + + "propertyBag": { + "description": "Key/value pairs that provide additional information about the object.", + "type": "object", + "additionalProperties": true, + "properties": { + "tags": { + "description": "A set of distinct strings that provide additional information.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "type": "string" + } + } + } + }, + + "rectangle": { + "description": "An area within an image.", + "additionalProperties": false, + "type": "object", + "properties": { + "top": { + "description": "The Y coordinate of the top edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + + "left": { + "description": "The X coordinate of the left edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + + "bottom": { + "description": "The Y coordinate of the bottom edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + + "right": { + "description": "The X coordinate of the right edge of the rectangle, measured in the image's natural units.", + "type": "number" + }, + + "message": { + "description": "A message relevant to the rectangle.", + "$ref": "#/definitions/message" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the rectangle.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "region": { + "description": "A region within an artifact where a result was detected.", + "additionalProperties": false, + "type": "object", + "properties": { + "startLine": { + "description": "The line 
number of the first character in the region.", + "type": "integer", + "minimum": 1 + }, + + "startColumn": { + "description": "The column number of the first character in the region.", + "type": "integer", + "minimum": 1 + }, + + "endLine": { + "description": "The line number of the last character in the region.", + "type": "integer", + "minimum": 1 + }, + + "endColumn": { + "description": "The column number of the character following the end of the region.", + "type": "integer", + "minimum": 1 + }, + + "charOffset": { + "description": "The zero-based offset from the beginning of the artifact of the first character in the region.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "charLength": { + "description": "The length of the region in characters.", + "type": "integer", + "minimum": 0 + }, + + "byteOffset": { + "description": "The zero-based offset from the beginning of the artifact of the first byte in the region.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "byteLength": { + "description": "The length of the region in bytes.", + "type": "integer", + "minimum": 0 + }, + + "snippet": { + "description": "The portion of the artifact contents within the specified region.", + "$ref": "#/definitions/artifactContent" + }, + + "message": { + "description": "A message relevant to the region.", + "$ref": "#/definitions/message" + }, + + "sourceLanguage": { + "description": "Specifies the source language, if any, of the portion of the artifact specified by the region object.", + "type": "string" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the region.", + "$ref": "#/definitions/propertyBag" + } + }, + + "anyOf": [ + { "required": ["startLine"] }, + { "required": ["charOffset"] }, + { "required": ["byteOffset"] } + ] + }, + + "replacement": { + "description": "The replacement of a single region of an artifact.", + "additionalProperties": false, + "type": "object", + "properties": { 
 + "deletedRegion": { + "description": "The region of the artifact to delete.", + "$ref": "#/definitions/region" + }, + + "insertedContent": { + "description": "The content to insert at the location specified by the 'deletedRegion' property.", + "$ref": "#/definitions/artifactContent" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the replacement.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["deletedRegion"] + }, + + "reportingDescriptor": { + "description": "Metadata that describes a specific report produced by the tool, as part of the analysis it provides or its runtime reporting.", + "additionalProperties": false, + "type": "object", + "properties": { + "id": { + "description": "A stable, opaque identifier for the report.", + "type": "string" + }, + + "deprecatedIds": { + "description": "An array of stable, opaque identifiers by which this report was known in some previous version of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string" + } + }, + + "guid": { + "description": "A unique identifier for the reporting descriptor in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "deprecatedGuids": { + "description": "An array of unique identifiers in the form of a GUID by which this report was known in some previous version of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + } + }, + + "name": { + "description": "A report identifier that is understandable to an end user.", + "type": "string" + }, + + "deprecatedNames": { + "description": "An array of readable identifiers by which this report was known in some previous version of the analysis tool.", + "type": 
"array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string" + } + }, + + "shortDescription": { + "description": "A concise description of the report. Should be a single sentence that is understandable when visible space is limited to a single line of text.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "fullDescription": { + "description": "A description of the report. Should, as far as possible, provide details sufficient to enable resolution of any problem indicated by the result.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "messageStrings": { + "description": "A set of name/value pairs with arbitrary names. Each value is a multiformatMessageString object, which holds message strings in plain text and (optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "defaultConfiguration": { + "description": "Default reporting configuration information.", + "$ref": "#/definitions/reportingConfiguration" + }, + + "helpUri": { + "description": "A URI where the primary documentation for the report can be found.", + "type": "string", + "format": "uri" + }, + + "help": { + "description": "Provides the primary documentation for the report, useful when there is no online documentation.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "relationships": { + "description": "An array of objects that describe relationships between this reporting descriptor and others.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/reportingDescriptorRelationship" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the report.", + "$ref": "#/definitions/propertyBag" + } + }, + 
"required": ["id"] + }, + + "reportingConfiguration": { + "description": "Information about a rule or notification that can be configured at runtime.", + "type": "object", + "additionalProperties": false, + "properties": { + "enabled": { + "description": "Specifies whether the report may be produced during the scan.", + "type": "boolean", + "default": true + }, + + "level": { + "description": "Specifies the failure level for the report.", + "default": "warning", + "enum": ["none", "note", "warning", "error"], + "type": "string" + }, + + "rank": { + "description": "Specifies the relative priority of the report. Used for analysis output only.", + "type": "number", + "default": -1.0, + "minimum": -1.0, + "maximum": 100.0 + }, + + "parameters": { + "description": "Contains configuration information specific to a report.", + "$ref": "#/definitions/propertyBag" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the reporting configuration.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "reportingDescriptorReference": { + "description": "Information about how to locate a relevant reporting descriptor.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "The id of the descriptor.", + "type": "string" + }, + + "index": { + "description": "The index into an array of descriptors in toolComponent.ruleDescriptors, toolComponent.notificationDescriptors, or toolComponent.taxonomyDescriptors, depending on context.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "guid": { + "description": "A guid that uniquely identifies the descriptor.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "toolComponent": { + "description": "A reference used to locate the toolComponent associated with the descriptor.", + "$ref": "#/definitions/toolComponentReference" + }, + + "properties": { + 
"description": "Key/value pairs that provide additional information about the reporting descriptor reference.", + "$ref": "#/definitions/propertyBag" + } + }, + "anyOf": [ + { "required": ["index"] }, + { "required": ["guid"] }, + { "required": ["id"] } + ] + }, + + "reportingDescriptorRelationship": { + "description": "Information about the relation of one reporting descriptor to another.", + "type": "object", + "additionalProperties": false, + "properties": { + "target": { + "description": "A reference to the related reporting descriptor.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + + "kinds": { + "description": "A set of distinct strings that categorize the relationship. Well-known kinds include 'canPrecede', 'canFollow', 'willPrecede', 'willFollow', 'superset', 'subset', 'equal', 'disjoint', 'relevant', and 'incomparable'.", + "type": "array", + "default": ["relevant"], + "uniqueItems": true, + "items": { + "type": "string" + } + }, + + "description": { + "description": "A description of the reporting descriptor relationship.", + "$ref": "#/definitions/message" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the reporting descriptor reference.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["target"] + }, + + "result": { + "description": "A result produced by an analysis tool.", + "additionalProperties": false, + "type": "object", + "properties": { + "ruleId": { + "description": "The stable, unique identifier of the rule, if any, to which this result is relevant.", + "type": "string" + }, + + "ruleIndex": { + "description": "The index within the tool component rules array of the rule object associated with this result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "rule": { + "description": "A reference used to locate the rule descriptor relevant to this result.", + "$ref": "#/definitions/reportingDescriptorReference" + }, + + "kind": { + "description": "A 
value that categorizes results by evaluation state.", + "default": "fail", + "enum": [ + "notApplicable", + "pass", + "fail", + "review", + "open", + "informational" + ], + "type": "string" + }, + + "level": { + "description": "A value specifying the severity level of the result.", + "default": "warning", + "enum": ["none", "note", "warning", "error"], + "type": "string" + }, + + "message": { + "description": "A message that describes the result. The first sentence of the message only will be displayed when visible space is limited.", + "$ref": "#/definitions/message" + }, + + "analysisTarget": { + "description": "Identifies the artifact that the analysis tool was instructed to scan. This need not be the same as the artifact where the result actually occurred.", + "$ref": "#/definitions/artifactLocation" + }, + + "locations": { + "description": "The set of locations where the result was detected. Specify only one location unless the problem indicated by the result can only be corrected by making a change at every specified location.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/location" + } + }, + + "guid": { + "description": "A stable, unique identifier for the result in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "correlationGuid": { + "description": "A stable, unique identifier for the equivalence class of logically identical results to which this result belongs, in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "occurrenceCount": { + "description": "A positive integer specifying the number of times this logically unique result was observed in this run.", + "type": "integer", + "minimum": 1 + }, + + "partialFingerprints": { + "description": "A set of strings that 
contribute to the stable, unique identity of the result.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "fingerprints": { + "description": "A set of strings each of which individually defines a stable, unique identity for the result.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "stacks": { + "description": "An array of 'stack' objects relevant to the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/stack" + } + }, + + "codeFlows": { + "description": "An array of 'codeFlow' objects relevant to the result.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/codeFlow" + } + }, + + "graphs": { + "description": "An array of zero or more unique graph objects associated with the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/graph" + } + }, + + "graphTraversals": { + "description": "An array of one or more unique 'graphTraversal' objects.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/graphTraversal" + } + }, + + "relatedLocations": { + "description": "A set of locations relevant to this result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/location" + } + }, + + "suppressions": { + "description": "A set of suppressions relevant to this result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/suppression" + } + }, + + "baselineState": { + "description": "The state of a result relative to a baseline of a previous run.", + "enum": ["new", "unchanged", "updated", "absent"], + "type": "string" + }, + + "rank": { + "description": "A number representing the priority or importance of the result.", + "type": 
"number", + "default": -1.0, + "minimum": -1.0, + "maximum": 100.0 + }, + + "attachments": { + "description": "A set of artifacts relevant to the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/attachment" + } + }, + + "hostedViewerUri": { + "description": "An absolute URI at which the result can be viewed.", + "type": "string", + "format": "uri" + }, + + "workItemUris": { + "description": "The URIs of the work items associated with this result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "type": "string", + "format": "uri" + } + }, + + "provenance": { + "description": "Information about how and when the result was detected.", + "$ref": "#/definitions/resultProvenance" + }, + + "fixes": { + "description": "An array of 'fix' objects, each of which represents a proposed fix to the problem indicated by the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/fix" + } + }, + + "taxa": { + "description": "An array of references to taxonomy reporting descriptors that are applicable to the result.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/reportingDescriptorReference" + } + }, + + "webRequest": { + "description": "A web request associated with this result.", + "$ref": "#/definitions/webRequest" + }, + + "webResponse": { + "description": "A web response associated with this result.", + "$ref": "#/definitions/webResponse" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the result.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["message"] + }, + + "resultProvenance": { + "description": "Contains information about how and when a result was detected.", + "additionalProperties": false, + "type": "object", + "properties": { + "firstDetectionTimeUtc": { + 
"description": "The Coordinated Universal Time (UTC) date and time at which the result was first detected. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + + "lastDetectionTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which the result was most recently detected. See \"Date/time properties\" in the SARIF spec for the required format.", + "type": "string", + "format": "date-time" + }, + + "firstDetectionRunGuid": { + "description": "A GUID-valued string equal to the automationDetails.guid property of the run in which the result was first detected.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "lastDetectionRunGuid": { + "description": "A GUID-valued string equal to the automationDetails.guid property of the run in which the result was most recently detected.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "invocationIndex": { + "description": "The index within the run.invocations array of the invocation object which describes the tool invocation that detected the result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "conversionSources": { + "description": "An array of physicalLocation objects which specify the portions of an analysis tool's output that a converter transformed into the result.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/physicalLocation" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the result.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "run": { + "description": "Describes a single run of an analysis tool, and contains the reported output of that run.", + "additionalProperties": false, + "type": 
"object", + "properties": { + "tool": { + "description": "Information about the tool or tool pipeline that generated the results in this run. A run can only contain results produced by a single tool or tool pipeline. A run can aggregate results from multiple log files, as long as context around the tool run (tool command-line arguments and the like) is identical for all aggregated files.", + "$ref": "#/definitions/tool" + }, + + "invocations": { + "description": "Describes the invocation of the analysis tool.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/invocation" + } + }, + + "conversion": { + "description": "A conversion object that describes how a converter transformed an analysis tool's native reporting format into the SARIF format.", + "$ref": "#/definitions/conversion" + }, + + "language": { + "description": "The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase culture code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). 
The casing is recommended but not required (in order for this data to conform to RFC5646).", + "type": "string", + "default": "en-US", + "pattern": "^[a-zA-Z]{2}(-[a-zA-Z]{2})?$" + }, + + "versionControlProvenance": { + "description": "Specifies the revision in version control of the artifacts that were scanned.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/versionControlDetails" + } + }, + + "originalUriBaseIds": { + "description": "The artifact location specified by each uriBaseId symbol on the machine where the tool originally ran.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/artifactLocation" + } + }, + + "artifacts": { + "description": "An array of artifact objects relevant to the run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/artifact" + } + }, + + "logicalLocations": { + "description": "An array of logical locations such as namespaces, types or functions.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/logicalLocation" + } + }, + + "graphs": { + "description": "An array of zero or more unique graph objects associated with the run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/graph" + } + }, + + "results": { + "description": "The set of results contained in an SARIF log. The results array can be omitted when a run is solely exporting rules metadata. 
It must be present (but may be empty) if a log file represents an actual scan.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/result" + } + }, + + "automationDetails": { + "description": "Automation details that describe this run.", + "$ref": "#/definitions/runAutomationDetails" + }, + + "runAggregates": { + "description": "Automation details that describe the aggregate of runs to which this run belongs.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/runAutomationDetails" + } + }, + + "baselineGuid": { + "description": "The 'guid' property of a previous SARIF 'run' that comprises the baseline that was used to compute result 'baselineState' properties for the run.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "redactionTokens": { + "description": "An array of strings used to replace sensitive information in a redaction-aware property.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "type": "string" + } + }, + + "defaultEncoding": { + "description": "Specifies the default encoding for any artifact object that refers to a text file.", + "type": "string" + }, + + "defaultSourceLanguage": { + "description": "Specifies the default source language for any artifact object that refers to a text file that contains source code.", + "type": "string" + }, + + "newlineSequences": { + "description": "An ordered list of character sequences that were treated as line breaks when computing region information for the run.", + "type": "array", + "minItems": 1, + "uniqueItems": true, + "default": ["\r\n", "\n"], + "items": { + "type": "string" + } + }, + + "columnKind": { + "description": "Specifies the unit in which the tool measures columns.", + "enum": ["utf16CodeUnits", "unicodeCodePoints"], + "type": "string" + }, + + 
"externalPropertyFileReferences": { + "description": "References to external property files that should be inlined with the content of a root log file.", + "$ref": "#/definitions/externalPropertyFileReferences" + }, + + "threadFlowLocations": { + "description": "An array of threadFlowLocation objects cached at run level.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/threadFlowLocation" + } + }, + + "taxonomies": { + "description": "An array of toolComponent objects relevant to a taxonomy in which results are categorized.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "addresses": { + "description": "Addresses associated with this run instance, if any.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "$ref": "#/definitions/address" + } + }, + + "translations": { + "description": "The set of available translations of the localized data provided by the tool.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "policies": { + "description": "Contains configurations that may potentially override both reportingDescriptor.defaultConfiguration (the tool's default severities) and invocation.configurationOverrides (severities established at run-time from the command line).", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "webRequests": { + "description": "An array of request objects cached at run level.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/webRequest" + } + }, + + "webResponses": { + "description": "An array of response objects cached at run level.", + "type": "array", + "minItems": 0, + "uniqueItems": 
true, + "default": [], + "items": { + "$ref": "#/definitions/webResponse" + } + }, + + "specialLocations": { + "description": "A specialLocations object that defines locations of special significance to SARIF consumers.", + "$ref": "#/definitions/specialLocations" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the run.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["tool"] + }, + + "runAutomationDetails": { + "description": "Information that describes a run's identity and role within an engineering system process.", + "additionalProperties": false, + "type": "object", + "properties": { + "description": { + "description": "A description of the identity and role played within the engineering system by this object's containing run object.", + "$ref": "#/definitions/message" + }, + + "id": { + "description": "A hierarchical string that uniquely identifies this object's containing run object.", + "type": "string" + }, + + "guid": { + "description": "A stable, unique identifier for this object's containing run object in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "correlationGuid": { + "description": "A stable, unique identifier for the equivalence class of runs to which this object's containing run object belongs in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the run automation details.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "specialLocations": { + "description": "Defines locations of special significance to SARIF consumers.", + "type": "object", + "additionalProperties": false, + "properties": { + "displayBase": { + "description": "Provides a suggestion to SARIF 
consumers to display file paths relative to the specified location.", + "$ref": "#/definitions/artifactLocation" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the special locations.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "stack": { + "description": "A call stack that is relevant to a result.", + "additionalProperties": false, + "type": "object", + "properties": { + "message": { + "description": "A message relevant to this call stack.", + "$ref": "#/definitions/message" + }, + + "frames": { + "description": "An array of stack frames that represents a sequence of calls, rendered in reverse chronological order, that comprise the call stack.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/stackFrame" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the stack.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["frames"] + }, + + "stackFrame": { + "description": "A function call within a stack trace.", + "additionalProperties": false, + "type": "object", + "properties": { + "location": { + "description": "The location to which this stack frame refers.", + "$ref": "#/definitions/location" + }, + + "module": { + "description": "The name of the module that contains the code of this stack frame.", + "type": "string" + }, + + "threadId": { + "description": "The thread identifier of the stack frame.", + "type": "integer" + }, + + "parameters": { + "description": "The parameters of the call that is executing.", + "type": "array", + "minItems": 0, + "uniqueItems": false, + "default": [], + "items": { + "type": "string", + "default": [] + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the stack frame.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "suppression": { + "description": "A suppression that is relevant to a 
result.", + "additionalProperties": false, + "type": "object", + "properties": { + "guid": { + "description": "A stable, unique identifier for the suprression in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "kind": { + "description": "A string that indicates where the suppression is persisted.", + "enum": ["inSource", "external"], + "type": "string" + }, + + "status": { + "description": "A string that indicates the review status of the suppression.", + "enum": ["accepted", "underReview", "rejected"], + "type": "string" + }, + + "justification": { + "description": "A string representing the justification for the suppression.", + "type": "string" + }, + + "location": { + "description": "Identifies the location associated with the suppression.", + "$ref": "#/definitions/location" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the suppression.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["kind"] + }, + + "threadFlow": { + "description": "Describes a sequence of code locations that specify a path through a single thread of execution such as an operating system or fiber.", + "type": "object", + "additionalProperties": false, + "properties": { + "id": { + "description": "An string that uniquely identifies the threadFlow within the codeFlow in which it occurs.", + "type": "string" + }, + + "message": { + "description": "A message relevant to the thread flow.", + "$ref": "#/definitions/message" + }, + + "initialState": { + "description": "Values of relevant expressions at the start of the thread flow that may change during thread flow execution.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "immutableState": { + "description": "Values of relevant expressions at the start of the thread flow that remain constant.", + "type": 
"object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "locations": { + "description": "A temporally ordered array of 'threadFlowLocation' objects, each of which describes a location visited by the tool while producing the result.", + "type": "array", + "minItems": 1, + "uniqueItems": false, + "items": { + "$ref": "#/definitions/threadFlowLocation" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the thread flow.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["locations"] + }, + + "threadFlowLocation": { + "description": "A location visited by an analysis tool while simulating or monitoring the execution of a program.", + "additionalProperties": false, + "type": "object", + "properties": { + "index": { + "description": "The index within the run threadFlowLocations array.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "location": { + "description": "The code location.", + "$ref": "#/definitions/location" + }, + + "stack": { + "description": "The call stack leading to this location.", + "$ref": "#/definitions/stack" + }, + + "kinds": { + "description": "A set of distinct strings that categorize the thread flow location. 
Well-known kinds include 'acquire', 'release', 'enter', 'exit', 'call', 'return', 'branch', 'implicit', 'false', 'true', 'caution', 'danger', 'unknown', 'unreachable', 'taint', 'function', 'handler', 'lock', 'memory', 'resource', 'scope' and 'value'.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "type": "string" + } + }, + + "taxa": { + "description": "An array of references to rule or taxonomy reporting descriptors that are applicable to the thread flow location.", + "type": "array", + "default": [], + "minItems": 0, + "uniqueItems": true, + "items": { + "$ref": "#/definitions/reportingDescriptorReference" + } + }, + + "module": { + "description": "The name of the module that contains the code that is executing.", + "type": "string" + }, + + "state": { + "description": "A dictionary, each of whose keys specifies a variable or expression, the associated value of which represents the variable or expression value. For an annotation of kind 'continuation', for example, this dictionary might hold the current assumed values of a set of global variables.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "nestingLevel": { + "description": "An integer representing a containment hierarchy within the thread flow.", + "type": "integer", + "minimum": 0 + }, + + "executionOrder": { + "description": "An integer representing the temporal order in which execution reached this location.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "executionTimeUtc": { + "description": "The Coordinated Universal Time (UTC) date and time at which this location was executed.", + "type": "string", + "format": "date-time" + }, + + "importance": { + "description": "Specifies the importance of this location in understanding the code flow in which it occurs. The order from most to least important is \"essential\", \"important\", \"unimportant\". 
Default: \"important\".", + "enum": ["important", "essential", "unimportant"], + "default": "important", + "type": "string" + }, + + "webRequest": { + "description": "A web request associated with this thread flow location.", + "$ref": "#/definitions/webRequest" + }, + + "webResponse": { + "description": "A web response associated with this thread flow location.", + "$ref": "#/definitions/webResponse" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the threadflow location.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "tool": { + "description": "The analysis tool that was run.", + "additionalProperties": false, + "type": "object", + "properties": { + "driver": { + "description": "The analysis tool that was run.", + "$ref": "#/definitions/toolComponent" + }, + + "extensions": { + "description": "Tool extensions that contributed to or reconfigured the analysis tool that was run.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponent" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the tool.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["driver"] + }, + + "toolComponent": { + "description": "A component, such as a plug-in or the driver, of the analysis tool that was run.", + "additionalProperties": false, + "type": "object", + "properties": { + "guid": { + "description": "A unique identifier for the tool component in the form of a GUID.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "name": { + "description": "The name of the tool component.", + "type": "string" + }, + + "organization": { + "description": "The organization or company that produced the tool component.", + "type": "string" + }, + + "product": { + "description": "A product suite to which the tool 
component belongs.", + "type": "string" + }, + + "productSuite": { + "description": "A localizable string containing the name of the suite of products to which the tool component belongs.", + "type": "string" + }, + + "shortDescription": { + "description": "A brief description of the tool component.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "fullDescription": { + "description": "A comprehensive description of the tool component.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "fullName": { + "description": "The name of the tool component along with its version and any other useful identifying information, such as its locale.", + "type": "string" + }, + + "version": { + "description": "The tool component version, in whatever format the component natively provides.", + "type": "string" + }, + + "semanticVersion": { + "description": "The tool component version in the format specified by Semantic Versioning 2.0.", + "type": "string" + }, + + "dottedQuadFileVersion": { + "description": "The binary version of the tool component's primary executable file expressed as four non-negative integers separated by a period (for operating systems that express file versions in this way).", + "type": "string", + "pattern": "[0-9]+(\\.[0-9]+){3}" + }, + + "releaseDateUtc": { + "description": "A string specifying the UTC date (and optionally, the time) of the component's release.", + "type": "string" + }, + + "downloadUri": { + "description": "The absolute URI from which the tool component can be downloaded.", + "type": "string", + "format": "uri" + }, + + "informationUri": { + "description": "The absolute URI at which information about this version of the tool component can be found.", + "type": "string", + "format": "uri" + }, + + "globalMessageStrings": { + "description": "A dictionary, each of whose keys is a resource identifier and each of whose values is a multiformatMessageString object, which holds message strings in plain text and 
(optionally) Markdown format. The strings can include placeholders, which can be used to construct a message in combination with an arbitrary number of additional string arguments.", + "type": "object", + "additionalProperties": { + "$ref": "#/definitions/multiformatMessageString" + } + }, + + "notifications": { + "description": "An array of reportingDescriptor objects relevant to the notifications related to the configuration and runtime execution of the tool component.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/reportingDescriptor" + } + }, + + "rules": { + "description": "An array of reportingDescriptor objects relevant to the analysis performed by the tool component.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/reportingDescriptor" + } + }, + + "taxa": { + "description": "An array of reportingDescriptor objects relevant to the definitions of both standalone and tool-defined taxonomies.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/reportingDescriptor" + } + }, + + "locations": { + "description": "An array of the artifactLocation objects associated with the tool component.", + "type": "array", + "minItems": 0, + "default": [], + "items": { + "$ref": "#/definitions/artifactLocation" + } + }, + + "language": { + "description": "The language of the messages emitted into the log file during this run (expressed as an ISO 639-1 two-letter lowercase language code) and an optional region (expressed as an ISO 3166-1 two-letter uppercase subculture code associated with a country or region). 
The casing is recommended but not required (in order for this data to conform to RFC5646).", + "type": "string", + "default": "en-US", + "pattern": "^[a-zA-Z]{2}(-[a-zA-Z]{2})?$" + }, + + "contents": { + "description": "The kinds of data contained in this object.", + "type": "array", + "uniqueItems": true, + "default": ["localizedData", "nonLocalizedData"], + "items": { + "enum": ["localizedData", "nonLocalizedData"], + "type": "string" + } + }, + + "isComprehensive": { + "description": "Specifies whether this object contains a complete definition of the localizable and/or non-localizable data for this component, as opposed to including only data that is relevant to the results persisted to this log file.", + "type": "boolean", + "default": false + }, + + "localizedDataSemanticVersion": { + "description": "The semantic version of the localized strings defined in this component; maintained by components that provide translations.", + "type": "string" + }, + + "minimumRequiredLocalizedDataSemanticVersion": { + "description": "The minimum value of localizedDataSemanticVersion required in translations consumed by this component; used by components that consume translations.", + "type": "string" + }, + + "associatedComponent": { + "description": "The component which is strongly associated with this component. For a translation, this refers to the component which has been translated. 
For an extension, this is the driver that provides the extension's plugin model.", + "$ref": "#/definitions/toolComponentReference" + }, + + "translationMetadata": { + "description": "Translation metadata, required for a translation, not populated by other component types.", + "$ref": "#/definitions/translationMetadata" + }, + + "supportedTaxonomies": { + "description": "An array of toolComponentReference objects to declare the taxonomies supported by the tool component.", + "type": "array", + "minItems": 0, + "uniqueItems": true, + "default": [], + "items": { + "$ref": "#/definitions/toolComponentReference" + } + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the tool component.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["name"] + }, + + "toolComponentReference": { + "description": "Identifies a particular toolComponent object, either the driver or an extension.", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "description": "The 'name' property of the referenced toolComponent.", + "type": "string" + }, + + "index": { + "description": "An index into the referenced toolComponent in tool.extensions.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "guid": { + "description": "The 'guid' property of the referenced toolComponent.", + "type": "string", + "pattern": "^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}$" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the toolComponentReference.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "translationMetadata": { + "description": "Provides additional metadata related to translation.", + "type": "object", + "additionalProperties": false, + "properties": { + "name": { + "description": "The name associated with the translation metadata.", + "type": "string" + }, + + "fullName": { + "description": 
"The full name associated with the translation metadata.", + "type": "string" + }, + + "shortDescription": { + "description": "A brief description of the translation metadata.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "fullDescription": { + "description": "A comprehensive description of the translation metadata.", + "$ref": "#/definitions/multiformatMessageString" + }, + + "downloadUri": { + "description": "The absolute URI from which the translation metadata can be downloaded.", + "type": "string", + "format": "uri" + }, + + "informationUri": { + "description": "The absolute URI from which information related to the translation metadata can be downloaded.", + "type": "string", + "format": "uri" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the translation metadata.", + "$ref": "#/definitions/propertyBag" + } + }, + "required": ["name"] + }, + + "versionControlDetails": { + "description": "Specifies the information necessary to retrieve a desired revision from a version control system.", + "type": "object", + "additionalProperties": false, + "properties": { + "repositoryUri": { + "description": "The absolute URI of the repository.", + "type": "string", + "format": "uri" + }, + + "revisionId": { + "description": "A string that uniquely and permanently identifies the revision within the repository.", + "type": "string" + }, + + "branch": { + "description": "The name of a branch containing the revision.", + "type": "string" + }, + + "revisionTag": { + "description": "A tag that has been applied to the revision.", + "type": "string" + }, + + "asOfTimeUtc": { + "description": "A Coordinated Universal Time (UTC) date and time that can be used to synchronize an enlistment to the state of the repository at that time.", + "type": "string", + "format": "date-time" + }, + + "mappedTo": { + "description": "The location in the local file system to which the root of the repository was mapped at the time 
of the analysis.", + "$ref": "#/definitions/artifactLocation" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the version control details.", + "$ref": "#/definitions/propertyBag" + } + }, + + "required": ["repositoryUri"] + }, + + "webRequest": { + "description": "Describes an HTTP request.", + "type": "object", + "additionalProperties": false, + "properties": { + "index": { + "description": "The index within the run.webRequests array of the request object associated with this result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "protocol": { + "description": "The request protocol. Example: 'http'.", + "type": "string" + }, + + "version": { + "description": "The request version. Example: '1.1'.", + "type": "string" + }, + + "target": { + "description": "The target of the request.", + "type": "string" + }, + + "method": { + "description": "The HTTP method. Well-known values are 'GET', 'PUT', 'POST', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT'.", + "type": "string" + }, + + "headers": { + "description": "The request headers.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "parameters": { + "description": "The request parameters.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "body": { + "description": "The body of the request.", + "$ref": "#/definitions/artifactContent" + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the request.", + "$ref": "#/definitions/propertyBag" + } + } + }, + + "webResponse": { + "description": "Describes the response to an HTTP request.", + "type": "object", + "additionalProperties": false, + "properties": { + "index": { + "description": "The index within the run.webResponses array of the response object associated with this result.", + "type": "integer", + "default": -1, + "minimum": -1 + }, + + "protocol": { + "description": "The 
response protocol. Example: 'http'.", + "type": "string" + }, + + "version": { + "description": "The response version. Example: '1.1'.", + "type": "string" + }, + + "statusCode": { + "description": "The response status code. Example: 451.", + "type": "integer" + }, + + "reasonPhrase": { + "description": "The response reason. Example: 'Not found'.", + "type": "string" + }, + + "headers": { + "description": "The response headers.", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + + "body": { + "description": "The body of the response.", + "$ref": "#/definitions/artifactContent" + }, + + "noResponseReceived": { + "description": "Specifies whether a response was received from the server.", + "type": "boolean", + "default": false + }, + + "properties": { + "description": "Key/value pairs that provide additional information about the response.", + "$ref": "#/definitions/propertyBag" + } + } + } + } +} diff --git a/test/fixtures/snyk-config-no-version/.snyk b/test/fixtures/snyk-config-no-version/.snyk index 8eefd88a23..fc75de000a 100644 --- a/test/fixtures/snyk-config-no-version/.snyk +++ b/test/fixtures/snyk-config-no-version/.snyk @@ -17,6 +17,11 @@ ignore: - 'tap@0.7.1 > runforcover@0.0.2 > bunker@0.1.2 > burrito@0.2.12 > uglify-js@1.1.1': reason: Stuff expires: '2015-12-20T16:37:39.553Z' + 'snyk:lic:npm:kafkajs-metrics:MPL-2.0': + - '*': + reason: 'Used but not modified, and not mixing source code' + expires: '2123-06-15T12:19:00.000Z' + created: '2023-06-15T12:19:00.000Z' patch: 'npm:ms:20151024': - 'socket.io@1.3.7 > debug@2.1.0 > ms@0.6.2': diff --git a/test/fixtures/snyk-config-no-version/expected b/test/fixtures/snyk-config-no-version/expected index 2f6439c270..84480dbc5f 100644 --- a/test/fixtures/snyk-config-no-version/expected +++ b/test/fixtures/snyk-config-no-version/expected @@ -43,6 +43,11 @@ tap@0.7.1 > runforcover@0.0.2 > bunker@0.1.2 > burrito@0.2.12 > uglify-js@1.1.1 Reason: Stuff Expires: Sun, 20 Dec 2015 16:37:39 GMT +#4 
Ignore https://snyk.io/vuln/snyk:lic:npm:kafkajs-metrics:MPL-2.0 in the following paths: +* +Reason: Used but not modified, and not mixing source code +Expires: Tue, 15 Jun 2123 12:19:00 GMT + ------------------------ #1 Exclude the following code items/paths: diff --git a/test/jest/acceptance/analytics.spec.ts b/test/jest/acceptance/analytics.spec.ts index 95fd81e111..f8cb6c5170 100644 --- a/test/jest/acceptance/analytics.spec.ts +++ b/test/jest/acceptance/analytics.spec.ts @@ -405,7 +405,11 @@ describe('analytics module', () => { expect(code).toBe(0); - const lastRequest = server.popRequest(); + const requests = server.getRequests().filter((value) => { + return value.url == '/api/v1/analytics/cli'; + }); + const lastRequest = requests.at(-1); + expect(lastRequest).toMatchObject({ headers: { host: `localhost:${port}`, diff --git a/test/jest/acceptance/auth.spec.ts b/test/jest/acceptance/auth.spec.ts index d2921287e7..8b760be4bb 100644 --- a/test/jest/acceptance/auth.spec.ts +++ b/test/jest/acceptance/auth.spec.ts @@ -55,6 +55,12 @@ describe('Auth', () => { }, ); expect(code).toEqual(0); + + // Run this command to verify that it succeeds with oauth, since it is implemented in TS + const ignoreCode = await runSnykCLI(`ignore --id=das`, { + env, + }); + expect(ignoreCode.code).toEqual(0); }); it('fails to us oauth client credentials grant to authenticate', async () => { diff --git a/test/jest/acceptance/cli-in-memory-threshold-bytes.spec.ts b/test/jest/acceptance/cli-in-memory-threshold-bytes.spec.ts new file mode 100644 index 0000000000..b2991f4b78 --- /dev/null +++ b/test/jest/acceptance/cli-in-memory-threshold-bytes.spec.ts @@ -0,0 +1,142 @@ +import * as os from 'os'; +import * as path from 'path'; +import { resolve } from 'path'; + +import { runSnykCLI } from '../util/runSnykCLI'; +import { matchers } from 'jest-json-schema'; +import * as fs from 'fs'; + +const projectRoot = resolve(__dirname, '../../..'); + +expect.extend(matchers); + +// For golang 
implementation only +describe('conditionally write data to disk', () => { + const projectWithCodeIssues = resolve( + projectRoot, + 'test/fixtures/sast/with_code_issues', + ); + + const env = { + // Use an org with consistent ignores enabled - uses golang/native workflow + SNYK_API: process.env.TEST_SNYK_API_DEV, + SNYK_TOKEN: process.env.TEST_SNYK_TOKEN_DEV, + SNYK_LOG_LEVEL: 'trace', + INTERNAL_CLEANUP_GLOBAL_TEMP_DIR_ENABLED: 'false', // disable cleanup of temp dir for testing + }; + + jest.setTimeout(60000); + + // GAF automatically creates the temp dir + // GAF will also automatically deletes it + // but we disable this for testing + const tempDirName = `tempDir-${Date.now()}`; + const tempDirPath = path.join(os.tmpdir(), tempDirName); + + afterEach(async () => { + // delete tempDirPath + try { + await fs.promises.rm(tempDirPath, { recursive: true, force: true }); + } catch { + console.warn('teardown failed'); + } + }); + + describe('when temp dir and threshold are set', () => { + const tempDirVars = { + SNYK_TMP_PATH: tempDirPath, + INTERNAL_IN_MEMORY_THRESHOLD_BYTES: '1', + }; + + it('should write to temp dir if payload is bigger than threshold', async () => { + await runSnykCLI(`code test ${projectWithCodeIssues}`, { + env: { + ...process.env, + ...env, + ...tempDirVars, + }, + }); + + // assert that tempDirPath exists + await expect( + fs.promises.access(tempDirPath, fs.constants.F_OK), + ).resolves.toBeUndefined(); + + // assert that tempDirPath contains workflow files + const files = await fs.promises.readdir(tempDirPath); + const workflowFiles = files.filter((file) => file.includes('workflow.')); + expect(workflowFiles.length).toBeGreaterThan(0); + }); + }); + + describe('when only threshold is set', () => { + const tempDirVars = { + INTERNAL_IN_MEMORY_THRESHOLD_BYTES: '1', + INTERNAL_CLEANUP_GLOBAL_TEMP_DIR_ENABLED: 'true', // re-enable as we're not setting the temp dir, and we want to ensure we cleanup + }; + + it('should write to default temp dir if 
payload is bigger than threshold', async () => { + await runSnykCLI(`code test ${projectWithCodeIssues}`, { + env: { + ...process.env, + ...env, + ...tempDirVars, + }, + }); + + // note we can't determine whether we write to disk or memory without inspecting logs + // GAF uses the default OS cache dir to write to, which we cannot access in the test + + // assert that tempDirPath does not exist + await expect( + fs.promises.access(tempDirPath, fs.constants.F_OK), + ).rejects.toThrow(); + }); + }); + + describe('when temp dir and threshold are NOT set', () => { + const tempDirVars = { + INTERNAL_CLEANUP_GLOBAL_TEMP_DIR_ENABLED: 'true', // re-enable as we're not setting the temp dir, and we want to ensure we cleanup + }; + + it('should use 512MB as default threshold', async () => { + await runSnykCLI(`code test ${projectWithCodeIssues}`, { + env: { + ...process.env, + ...env, + ...tempDirVars, + }, + }); + + // note we can't determine whether we write to disk or memory without inspecting logs + // GAF uses the default OS cache dir to write to, which we cannot access in the test + + // assert that tempDirPath does not exist + await expect( + fs.promises.access(tempDirPath, fs.constants.F_OK), + ).rejects.toThrow(); + }); + }); + + describe('when feature is disabled', () => { + const tempDirVars = { + INTERNAL_IN_MEMORY_THRESHOLD_BYTES: '-1', + INTERNAL_CLEANUP_GLOBAL_TEMP_DIR_ENABLED: 'true', // re-enable as we're not setting the temp dir, and we want to ensure we cleanup + }; + + it('should keep payload memory', async () => { + await runSnykCLI(`code test ${projectWithCodeIssues}`, { + env: { + ...process.env, + ...env, + ...tempDirVars, + }, + }); + + // assert that tempDirPath does not exist + await expect( + fs.promises.access(tempDirPath, fs.constants.F_OK), + ).rejects.toThrow(); + }); + }); +}); diff --git a/test/jest/acceptance/cli-json-file-output.spec.ts b/test/jest/acceptance/cli-json-file-output.spec.ts index 28f5138920..ec01fa175b 100644 --- 
a/test/jest/acceptance/cli-json-file-output.spec.ts +++ b/test/jest/acceptance/cli-json-file-output.spec.ts @@ -74,9 +74,8 @@ describe('test --json-file-output', () => { const reference = response.result.issuesData['SNYK-ALPINE319-OPENSSL-6148881'] .references[0]; - response.result.issuesData[ - 'SNYK-ALPINE319-OPENSSL-6148881' - ].references = new Array(420000).fill(reference); + response.result.issuesData['SNYK-ALPINE319-OPENSSL-6148881'].references = + new Array(420000).fill(reference); server.setCustomResponse(response); diff --git a/test/jest/acceptance/cli-json-output.spec.ts b/test/jest/acceptance/cli-json-output.spec.ts index 1fb65b7b3f..abbd1567e1 100644 --- a/test/jest/acceptance/cli-json-output.spec.ts +++ b/test/jest/acceptance/cli-json-output.spec.ts @@ -113,7 +113,7 @@ describe('test --json', () => { let hasReferenceCount = false; const p = new Parser(); - p.onValue = function(value) { + p.onValue = function (value) { if (this.key === 'path' && value === imageName) { hasExpectedPathString = true; } else if (this.key === 'vulnerabilities') { diff --git a/test/jest/acceptance/extra-certs.spec.ts b/test/jest/acceptance/extra-certs.spec.ts index 72ce9d53d3..71edeb5be8 100644 --- a/test/jest/acceptance/extra-certs.spec.ts +++ b/test/jest/acceptance/extra-certs.spec.ts @@ -56,7 +56,7 @@ describe('Extra CA certificates specified with `NODE_EXTRA_CA_CERTS`', () => { await server.listenWithHttps(port, { cert: certPem, key: keyPem }); // invoke WITHOUT additional certificate set => fails - const res1 = await runSnykCLI(`test --debug`, { + const res1Promise = runSnykCLI(`test --debug`, { env: { ...process.env, SNYK_API: SNYK_API, @@ -65,7 +65,7 @@ describe('Extra CA certificates specified with `NODE_EXTRA_CA_CERTS`', () => { }); // invoke WITH additional certificate set => succeeds - const res2 = await runSnykCLI(`test --debug`, { + const res2Promise = runSnykCLI(`test --debug`, { env: { ...process.env, NODE_EXTRA_CA_CERTS: 'cliv2/mytestcert.crt', @@ -74,10 
+74,8 @@ describe('Extra CA certificates specified with `NODE_EXTRA_CA_CERTS`', () => { }, }); - let res3 = { code: 2 }; - let res4 = { code: 0 }; // invoke WITHOUT additional certificate set => succeeds - res3 = await runSnykCLI( + const res3Promise = runSnykCLI( `sbom --debug --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format cyclonedx1.4+json`, { env: { @@ -89,7 +87,7 @@ describe('Extra CA certificates specified with `NODE_EXTRA_CA_CERTS`', () => { ); // invoke WITH additional certificate set => succeeds - res4 = await runSnykCLI( + const res4Promise = runSnykCLI( `sbom --debug --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format cyclonedx1.4+json`, { env: { @@ -101,14 +99,22 @@ describe('Extra CA certificates specified with `NODE_EXTRA_CA_CERTS`', () => { }, ); + const [res1, res2, res3, res4] = await Promise.all([ + res1Promise, + res2Promise, + res3Promise, + res4Promise, + ]); + await server.closePromise(); expect(res1.code).toBe(2); expect(res2.code).toBe(0); expect(res3.code).toBe(2); expect(res4.code).toBe(0); - fs.unlink('cliv2/mytestcert.crt', () => {}); - fs.unlink('cliv2/mytestcert.key', () => {}); - fs.unlink('cliv2/mytestcert.pem', () => {}); + + fs.unlinkSync('cliv2/mytestcert.crt'); + fs.unlinkSync('cliv2/mytestcert.key'); + fs.unlinkSync('cliv2/mytestcert.pem'); }); }); diff --git a/test/jest/acceptance/https.spec.ts b/test/jest/acceptance/https.spec.ts index a69737e07e..669854fbe9 100644 --- a/test/jest/acceptance/https.spec.ts +++ b/test/jest/acceptance/https.spec.ts @@ -71,10 +71,7 @@ describe('https', () => { expect(code).toBe(0); // get rid of the first entry which has another User Agent - server - .getRequests() - .reverse() - .pop(); + server.getRequests().reverse().pop(); for (const r of server.getRequests()) { expect(r.headers['user-agent']).toContain('snyk-cli/'); diff --git a/test/jest/acceptance/iac/cli-share-results.spec.ts b/test/jest/acceptance/iac/cli-share-results.spec.ts index a811062f7f..e9c0deb686 100644 --- 
a/test/jest/acceptance/iac/cli-share-results.spec.ts +++ b/test/jest/acceptance/iac/cli-share-results.spec.ts @@ -23,11 +23,10 @@ describe('CLI Share Results', () => { afterAll(async () => teardown()); - describe('feature flag is not enabled', () => { - beforeAll(() => { + describe('feature flag iacCliShareResults is not enabled', () => { + beforeEach(() => { server.setFeatureFlag('iacCliShareResults', false); }); - it('the output includes an error', async () => { const { stdout, exitCode } = await run( `snyk iac test ./iac/arm/rule_test.json --report`, @@ -40,8 +39,8 @@ describe('CLI Share Results', () => { }); }); - describe('feature flag is enabled', () => { - beforeAll(() => { + describe('feature flag iacCliShareResults is enabled', () => { + beforeEach(() => { server.setFeatureFlag('iacCliShareResults', true); }); @@ -252,4 +251,21 @@ describe('CLI Share Results', () => { }); }); }); + + describe('feature flag iacNewEngine is enabled', () => { + beforeEach(() => { + server.setFeatureFlag('iacNewEngine', true); + }); + + it('the output includes an error', async () => { + const { stdout, exitCode } = await run( + `snyk iac test ./iac/arm/rule_test.json --report`, + ); + + expect(stdout).toMatch( + 'flag --report is not yet supported when iacNewEngine flag is enabled', + ); + expect(exitCode).toBe(2); + }); + }); }); diff --git a/test/jest/acceptance/iac/helpers.ts b/test/jest/acceptance/iac/helpers.ts index 1e0fd3a3f2..f89652b8a3 100644 --- a/test/jest/acceptance/iac/helpers.ts +++ b/test/jest/acceptance/iac/helpers.ts @@ -69,7 +69,7 @@ export async function run( }, cwd: cwd ?? join(root, 'test/fixtures'), }, - function(err, stdout, stderr) { + function (err, stdout, stderr) { // err.code indicates the shell exited with non-zero code // which is in our case a success and we resolve. 
if (err && typeof err.code !== 'number') { diff --git a/test/jest/acceptance/iac/test-terraform.spec.ts b/test/jest/acceptance/iac/test-terraform.spec.ts index fb77c23c13..17bbff4f5f 100644 --- a/test/jest/acceptance/iac/test-terraform.spec.ts +++ b/test/jest/acceptance/iac/test-terraform.spec.ts @@ -1,10 +1,12 @@ import { isValidJSONString, startMockServer } from './helpers'; import * as path from 'path'; import { EOL } from 'os'; +import { FakeServer } from '../../../acceptance/fake-server'; jest.setTimeout(50000); describe('Terraform', () => { + let server: FakeServer; let run: ( cmd: string, ) => Promise<{ stdout: string; stderr: string; exitCode: number }>; @@ -14,10 +16,15 @@ describe('Terraform', () => { const result = await startMockServer(); run = result.run; teardown = result.teardown; + server = result.server; }); afterAll(async () => teardown()); + afterEach(() => { + server.restore(); + }); + describe('Terraform single file scans', () => { it('finds issues in Terraform file', async () => { const { stdout, exitCode } = await run( @@ -78,6 +85,29 @@ describe('Terraform', () => { ); expect(exitCode).toBe(3); }); + + describe('when the iacNewEngine feature flag is enabled', () => { + beforeAll(() => { + server.setFeatureFlag('iacNewEngine', true); + }); + it('uses the new engine, hence the new policies are used', async () => { + const { stdout, exitCode } = await run( + `snyk iac test ./iac/terraform/sg_open_ssh.tf --json`, + ); + + expect(isValidJSONString(stdout)).toBe(true); + + const jsonOut = JSON.parse(stdout); + expect(jsonOut).toHaveLength(1); + + const issues: any[] = jsonOut[0]?.infrastructureAsCodeIssues; + expect(issues).toHaveLength(2); + + const sortedIds = issues.map((issue) => issue.id).sort(); + expect(sortedIds).toEqual(['SNYK-CC-00110', 'SNYK-CC-00747']); + expect(exitCode).toBe(1); + }); + }); }); describe('Terraform directories', () => { diff --git a/test/jest/acceptance/iac/update-exclude-policy.spec.ts 
b/test/jest/acceptance/iac/update-exclude-policy.spec.ts index fb6d10dde2..44dd46c035 100644 --- a/test/jest/acceptance/iac/update-exclude-policy.spec.ts +++ b/test/jest/acceptance/iac/update-exclude-policy.spec.ts @@ -58,6 +58,8 @@ describe('iac update-exclude-policy', () => { const policy = await findAndLoadPolicy(tmpFolderPath, 'iac', {}); const expectedExcludes = { + global: [], + code: [], 'iac-drift': [ 'aws_iam_user.test-driftctl2', 'aws_iam_access_key.AKIA5QYBVVD2Y6PBAAPY', diff --git a/test/jest/acceptance/parallel-execution.spec.ts b/test/jest/acceptance/parallel-execution.spec.ts index 3d8db14046..6572601ed1 100644 --- a/test/jest/acceptance/parallel-execution.spec.ts +++ b/test/jest/acceptance/parallel-execution.spec.ts @@ -15,9 +15,10 @@ describe('Parallel CLI execution', () => { singleTestResult.push(runSnykCLI(`test -d`, { cwd: project.path() })); } - for (let i = 0; i < numberOfParallelExecutions; i++) { - const { code } = await singleTestResult[i]; - expect(code).toBe(1); - } + const results = await Promise.all(singleTestResult); + + results.forEach((result) => { + expect(result.code).toBe(1); + }); }); }); diff --git a/test/jest/acceptance/snyk-apps/config.spec.ts b/test/jest/acceptance/snyk-apps/config.spec.ts index 789118bc55..69b7b4a490 100644 --- a/test/jest/acceptance/snyk-apps/config.spec.ts +++ b/test/jest/acceptance/snyk-apps/config.spec.ts @@ -34,9 +34,7 @@ describe('config', () => { SNYK_API_REST_URL: 'http://localhost:' + port + baseURL, }; - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `apps create --org=${testData.orgId} --name=${testData.appName} --redirect-uris=${testData.redirectURIs} --scopes=${testData.scopes} --experimental`, { env }, ); @@ -50,9 +48,7 @@ describe('config', () => { SNYK_API_REST_URL: 'http://localhost:' + port + '/wrongbase', }; - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `apps create --org=${testData.orgId} --name=${testData.appName} 
--redirect-uris=${testData.redirectURIs} --scopes=${testData.scopes} --experimental`, { env }, ); @@ -66,9 +62,7 @@ describe('config', () => { SNYK_API_V3_URL: 'http://localhost:' + port + baseURL, }; - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `apps create --org=${testData.orgId} --name=${testData.appName} --redirect-uris=${testData.redirectURIs} --scopes=${testData.scopes} --experimental`, { env }, ); @@ -83,9 +77,7 @@ describe('config', () => { SNYK_API_V3_URL: 'http://localhost:' + port + baseURL, }; - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `apps create --org=${testData.orgId} --name=${testData.appName} --redirect-uris=${testData.redirectURIs} --scopes=${testData.scopes} --experimental`, { env }, ); diff --git a/test/jest/acceptance/snyk-code/snyk-code.spec.ts b/test/jest/acceptance/snyk-code/snyk-code.spec.ts index 0a8d3243c6..1e7a376422 100644 --- a/test/jest/acceptance/snyk-code/snyk-code.spec.ts +++ b/test/jest/acceptance/snyk-code/snyk-code.spec.ts @@ -3,14 +3,20 @@ import { runSnykCLI } from '../../util/runSnykCLI'; import { fakeServer } from '../../../acceptance/fake-server'; import { fakeDeepCodeServer } from '../../../acceptance/deepcode-fake-server'; import { getServerPort } from '../../util/getServerPort'; +import { matchers } from 'jest-json-schema'; +import { resolve } from 'path'; + const stripAnsi = require('strip-ansi'); +const projectRoot = resolve(__dirname, '../../../..'); + +expect.extend(matchers); const EXIT_CODE_SUCCESS = 0; const EXIT_CODE_ACTION_NEEDED = 1; const EXIT_CODE_FAIL_WITH_ERROR = 2; const EXIT_CODE_NO_SUPPORTED_FILES = 3; -describe('code', () => { +describe('snyk code test', () => { let server: ReturnType; let deepCodeServer: ReturnType; let env: Record; @@ -22,6 +28,13 @@ describe('code', () => { SNYK_HOST: 'http://localhost:' + port, SNYK_TOKEN: '123456789', }; + // expected Code Security Issues: 6 - 5 [High] 1 [Low] + // expected Code Quality Issues: 
2 - 2 [Medium] + const projectWithCodeIssues = resolve( + projectRoot, + 'test/fixtures/sast/with_code_issues', + ); + const emptyProject = resolve(projectRoot, 'test/fixtures/empty'); beforeAll((done) => { deepCodeServer = fakeDeepCodeServer(); @@ -58,191 +71,403 @@ describe('code', () => { expect(stderr).toBe(''); }); - describe('test', () => { - it('should fail - when we do not support files', async () => { - // Setup - const { path } = await createProjectFromFixture('empty'); - server.setOrgSetting('sast', true); + interface Workflow { + type: string; + env: { [key: string]: string | undefined }; + } - const { stdout, code, stderr } = await runSnykCLI(`code test ${path()}`, { - env, - }); - - expect(stderr).toBe(''); - expect(stdout).toContain(`We found 0 supported files`); - expect(code).toBe(EXIT_CODE_NO_SUPPORTED_FILES); // failure, no supported projects detected - }); + const integrationWorkflows: Workflow[] = [ + { + type: 'typescript', + env: { + INTERNAL_SNYK_CODE_IGNORES_ENABLED: 'false', + }, + }, + { + type: 'golang/native', + env: { + // internal GAF feature flag for consistent ignores + INTERNAL_SNYK_CODE_IGNORES_ENABLED: 'true', + }, + }, + ]; + + describe.each(integrationWorkflows)( + `integration`, + ({ type, env: integrationEnv }) => { + describe(`${type} workflow`, () => { + it('should show error if sast is not enabled', async () => { + server.setOrgSetting('sast', false); + + const { code, stdout, stderr } = await runSnykCLI( + `code test ${projectWithCodeIssues}`, + { + env: { + ...env, + ...integrationEnv, + }, + }, + ); + + expect(stderr).toBe(''); + expect(stdout).toContain('Snyk Code is not supported for org'); + expect(code).toBe(EXIT_CODE_FAIL_WITH_ERROR); + }); - it('should succeed - when no errors found', async () => { - // Setup - const { path } = await createProjectFromFixture( - 'sast-empty/shallow_empty', - ); - server.setOrgSetting('sast', true); - deepCodeServer.setSarifResponse( - 
require('../../../fixtures/sast-empty/empty-sarif.json'), - ); - - const { stdout, code, stderr } = await runSnykCLI(`code test ${path()}`, { - env, - }); + it('should succeed with correct exit code - with sarif output', async () => { + const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); + server.setOrgSetting('sast', true); + deepCodeServer.setCustomResponse({ + configFiles: [], + extensions: ['.java'], + }); + deepCodeServer.setSarifResponse(sarifPayload); + + // code-client-go abstracts deeproxy calls, so fake-server needs these endpoints + server.setCustomResponse({ + configFiles: [], + extensions: ['.java'], + }); + + const { stderr, code } = await runSnykCLI( + `code test ${projectWithCodeIssues} --sarif`, + { + env: { + ...env, + ...integrationEnv, + // code-client-go will panic if we don't supply the org UUID + SNYK_CFG_ORG: '11111111-2222-3333-4444-555555555555', + }, + }, + ); + + expect(code).toBe(EXIT_CODE_ACTION_NEEDED); + expect(stderr).toBe(''); + }); - expect(stderr).toBe(''); - expect(stdout).toContain(`Awesome! 
No issues were found.`); - expect(code).toBe(EXIT_CODE_SUCCESS); + it('should succeed with correct exit code - with json output', async () => { + const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); + server.setOrgSetting('sast', true); + deepCodeServer.setFiltersResponse({ + configFiles: [], + extensions: ['.java'], + }); + deepCodeServer.setSarifResponse(sarifPayload); + + // code-client-go abstracts deeproxy calls, so fake-server needs these endpoints + server.setCustomResponse({ + configFiles: [], + extensions: ['.java'], + }); + + const { stderr, code } = await runSnykCLI( + `code test ${projectWithCodeIssues} --json`, + { + env: { + ...env, + ...integrationEnv, + // code-client-go will panic if we don't supply the org UUID + SNYK_CFG_ORG: '11111111-2222-3333-4444-555555555555', + }, + }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODE_ACTION_NEEDED); + }); - expect( - server - .getRequests() - .filter((req) => req.originalUrl.endsWith('/analytics/cli')), - ).toHaveLength(2); - }); + it('should succeed with correct exit code - normal output', async () => { + const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); + server.setOrgSetting('sast', true); + deepCodeServer.setFiltersResponse({ + configFiles: [], + extensions: ['.java'], + }); + + deepCodeServer.setSarifResponse(sarifPayload); + + // code-client-go abstracts deeproxy calls, so fake-server needs these endpoints + server.setCustomResponse({ + configFiles: [], + extensions: ['.java'], + }); + + const { stderr, code } = await runSnykCLI( + `code test ${projectWithCodeIssues}`, + { + env: { + ...env, + ...integrationEnv, + // code-client-go will panic if we don't supply the org UUID + SNYK_CFG_ORG: '11111111-2222-3333-4444-555555555555', + }, + }, + ); + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODE_ACTION_NEEDED); + }); - it('should succeed - with correct exit code', async () => { - const { path } = await createProjectFromFixture( - 
'sast/shallow_sast_webgoat', - ); - server.setOrgSetting('sast', true); - deepCodeServer.setSarifResponse( - require('../../../fixtures/sast/sample-sarif.json'), - ); - - const { stdout, stderr, code } = await runSnykCLI(`code test ${path()}`, { - env, - }); + it('should fail with correct exit code - when testing empty project', async () => { + const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); + server.setOrgSetting('sast', true); + deepCodeServer.setSarifResponse(sarifPayload); - // We do not render the help message for unknown flags - expect(stderr).toBe(''); - expect(stripAnsi(stdout)).toContain('✗ [Medium] Information Exposure'); - expect(code).toBe(EXIT_CODE_ACTION_NEEDED); - }); + const { code } = await runSnykCLI(`code test ${emptyProject}`, { + env: { + ...env, + ...integrationEnv, + }, + }); - it('should show error if sast is not enabled', async () => { - // Setup - const { path } = await createProjectFromFixture( - 'sast/shallow_sast_webgoat', - ); - server.setOrgSetting('sast', false); + expect(code).toBe(EXIT_CODE_NO_SUPPORTED_FILES); + }); - const { stdout, code, stderr } = await runSnykCLI(`code test ${path()}`, { - env, + // TODO: reenable tests for golang/native when SNYK_CODE_CLIENT_PROXY_URL && LCE are supported + if (type === 'typescript') { + it('should support the SNYK_CODE_CLIENT_PROXY_URL env var', async () => { + const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); + server.setOrgSetting('sast', true); + deepCodeServer.setSarifResponse(sarifPayload); + + const { code } = await runSnykCLI(`code test ${emptyProject}`, { + env: { + ...env, + ...integrationEnv, + }, + }); + + expect(code).toEqual(EXIT_CODE_NO_SUPPORTED_FILES); + + const request = deepCodeServer + .getRequests() + .filter((value) => (value.url as string).includes(`/filters`)) + .pop(); + + expect(request).toBeDefined(); + }); + + it('use remote LCE URL as base when LCE is enabled', async () => { + const localCodeEngineUrl = 
fakeDeepCodeServer(); + localCodeEngineUrl.listen(() => {}); + + server.setOrgSetting('sast', true); + server.setLocalCodeEngineConfiguration({ + enabled: true, + allowCloudUpload: true, + url: 'http://localhost:' + localCodeEngineUrl.getPort(), + }); + + localCodeEngineUrl.setSarifResponse( + require('../../../fixtures/sast/sample-sarif.json'), + ); + + // code-client-go abstracts deeproxy calls, so fake-server needs these endpoints + server.setCustomResponse({ + configFiles: [], + extensions: ['.java'], + }); + + const { stdout, code, stderr } = await runSnykCLI( + `code test ${projectWithCodeIssues}`, + { + env: { + ...env, + ...integrationEnv, + // code-client-go will panic if we don't supply the org UUID + SNYK_CFG_ORG: '11111111-2222-3333-4444-555555555555', + }, + }, + ); + + expect(stderr).toBe(''); + expect(deepCodeServer.getRequests().length).toBe(0); + expect(localCodeEngineUrl.getRequests().length).toBeGreaterThan(0); + expect(stripAnsi(stdout)).toContain('✗ [Medium]'); + expect(code).toBe(EXIT_CODE_ACTION_NEEDED); + + localCodeEngineUrl.close(() => {}); + }); + } }); - - expect(stderr).toBe(''); - expect(stdout).toContain('Snyk Code is not supported for org'); - expect(code).toBe(EXIT_CODE_FAIL_WITH_ERROR); - }); - - it.each([['sarif'], ['json']])( - 'succeed testing with correct exit code - with %p output', - async (optionsName) => { - const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); - const { path } = await createProjectFromFixture( - 'sast/shallow_sast_webgoat', - ); - server.setOrgSetting('sast', true); - deepCodeServer.setSarifResponse(sarifPayload); - - const { stdout, stderr, code } = await runSnykCLI( - `code test ${path()} --${optionsName}`, - { - env, - }, - ); - - expect(stderr).toBe(''); - expect(JSON.parse(stdout)).toEqual(sarifPayload); - expect(code).toBe(EXIT_CODE_ACTION_NEEDED); + }, + ); + + const userJourneyWorkflows: Workflow[] = [ + { + type: 'typescript', + env: { + // force use of legacy implementation + 
INTERNAL_SNYK_CODE_IGNORES_ENABLED: 'false', }, - ); - - it('succeed testing with correct exit code - with sarif oputput and no markdown', async () => { - const sarifPayload = require('../../../fixtures/sast/sample-sarif.json'); - const { path } = await createProjectFromFixture( - 'sast/shallow_sast_webgoat', - ); - server.setOrgSetting('sast', true); - deepCodeServer.setSarifResponse(sarifPayload); - - const { stdout, stderr, code } = await runSnykCLI( - `code test ${path()} --sarif --no-markdown`, - { - env, - }, - ); - - expect(stderr).toBe(''); - const output = JSON.parse(stdout); - expect(Object.keys(output.runs[0].results[0].message)).not.toContain( - 'markdown', - ); - expect(code).toBe(EXIT_CODE_ACTION_NEEDED); - }); - - const failedCodeTestMessage = "Failed to run 'code test'"; - - // This is caused by the retry logic in the code-client - // which defaults to 10 retries with a 5 second delay - jest.setTimeout(60000); - it.each([ - [{ code: 401 }, `Unauthorized: ${failedCodeTestMessage}`], - [{ code: 429 }, failedCodeTestMessage], - [{ code: 500 }, failedCodeTestMessage], // TODO this causes the test to hang. 
Think it is due to retry logic - ])( - 'should fail - when server returns %p', - async (errorCodeObj, expectedResult) => { - const { path } = await createProjectFromFixture( - 'sast/shallow_sast_webgoat', - ); - server.setOrgSetting('sast', true); - deepCodeServer.setNextStatusCode(errorCodeObj.code); - deepCodeServer.setNextResponse({ - statusCode: errorCodeObj.code, - statusText: 'Unauthorized action', - apiName: 'code', + }, + { + type: 'golang/native', + env: { + // Use an org with consistent ignores enabled - uses golang/native workflow + SNYK_API: process.env.TEST_SNYK_API_DEV, + SNYK_TOKEN: process.env.TEST_SNYK_TOKEN_DEV, + }, + }, + ]; + + describe.each(userJourneyWorkflows)( + 'user journey', + ({ type, env: integrationEnv }) => { + describe(`${type} workflow`, () => { + jest.setTimeout(60000); + + describe('snyk code flag options', () => { + it('works with --remote-repo-url', async () => { + const expectedCodeSecurityIssues = 6; + const { stdout } = await runSnykCLI( + `code test ${projectWithCodeIssues} --remote-repo-url=https://github.com/snyk/cli.git --json`, + { + env: { + ...process.env, + ...integrationEnv, + }, + }, + ); + + const actualCodeSecurityIssues = + JSON.parse(stdout)?.runs[0]?.results?.length; + expect(actualCodeSecurityIssues).toEqual( + expectedCodeSecurityIssues, + ); + }); + + // TODO: reenable when fixed in CLI-397, CLI-436 + if (type === 'typescript') { + it('works with --severity-threshold', async () => { + const expectedHighCodeSecurityIssues = 5; + const { stdout } = await runSnykCLI( + `code test ${projectWithCodeIssues} --json --severity-threshold=high`, + { + env: { + ...process.env, + ...integrationEnv, + }, + }, + ); + + const actualCodeSecurityIssues = + JSON.parse(stdout)?.runs[0]?.results?.length; + expect(actualCodeSecurityIssues).toEqual( + expectedHighCodeSecurityIssues, + ); + }); + + it('works with --org', async () => { + const MADE_UP_ORG_WITH_NO_SNYK_CODE_PERMISSIONS = + 'madeUpOrgWithNoSnykCodePermissions'; +
const { stdout, code } = await runSnykCLI( + `code test ${projectWithCodeIssues} --org=${MADE_UP_ORG_WITH_NO_SNYK_CODE_PERMISSIONS}`, + { + env: { + ...process.env, + ...integrationEnv, + }, + }, + ); + + expect(stdout).toContain( + `Org ${MADE_UP_ORG_WITH_NO_SNYK_CODE_PERMISSIONS} was not found`, + ); + expect(code).toBe(EXIT_CODE_FAIL_WITH_ERROR); + }); + } }); - const { stdout, code, stderr } = await runSnykCLI( - `code test ${path()}`, - { - env, - }, - ); + // TODO: reenable for golang/native when it supports no git context + if (type === 'typescript') { + it('works on projects with no git context', async () => { + // createProjectFromFixture creates a new project without gitcontext + const { path } = await createProjectFromFixture( + 'sast/shallow_sast_webgoat', + ); + + const { stderr, code } = await runSnykCLI(`code test ${path()}`, { + env: { + ...process.env, + ...integrationEnv, + }, + }); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODE_ACTION_NEEDED); + }); + } + + it('should succeed - when no vulnerabilities found', async () => { + const noVulnsProject = resolve( + projectRoot, + 'test/fixtures/sast/no-vulnerabilities', + ); + + const { stderr, code } = await runSnykCLI( + `code test ${noVulnsProject}`, + { + env: { + ...process.env, + ...integrationEnv, + }, + }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODE_SUCCESS); + }); - expect(stderr).toBe(''); - expect(stdout).toContain(expectedResult); - expect(code).toBe(EXIT_CODE_FAIL_WITH_ERROR); - }, - ); + it('should not include code quality issues in results', async () => { + // expected Code Quality Issues: 2 - 2 [Medium] + const expectedCodeSecurityIssues = 6; + const { stdout } = await runSnykCLI( + `code test ${projectWithCodeIssues} --json`, + { + env: { + ...process.env, + ...integrationEnv, + }, + }, + ); + + const actualCodeSecurityIssues = + JSON.parse(stdout)?.runs[0]?.results?.length; + expect(actualCodeSecurityIssues).toEqual(expectedCodeSecurityIssues); + }); - 
it("use remote LCE's url as base when LCE is enabled", async () => { - const localCodeEngineUrl = fakeDeepCodeServer(); - localCodeEngineUrl.listen(() => {}); - - const { path } = await createProjectFromFixture( - 'sast/shallow_sast_webgoat', - ); - server.setOrgSetting('sast', true); - server.setLocalCodeEngineConfiguration({ - enabled: true, - allowCloudUpload: true, - url: 'http://localhost:' + localCodeEngineUrl.getPort(), - }); - localCodeEngineUrl.setSarifResponse( - require('../../../fixtures/sast/sample-sarif.json'), - ); + it('should fail with correct exit code - when testing empty project', async () => { + const { stderr, code } = await runSnykCLI( + `code test ${emptyProject}`, + { + env: { + ...process.env, + ...integrationEnv, + }, + }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODE_NO_SUPPORTED_FILES); + }); - const { stdout, code, stderr } = await runSnykCLI(`code test ${path()}`, { - env, + it('should fail with correct exit code - when using invalid token', async () => { + const { stderr, code } = await runSnykCLI( + `code test ${projectWithCodeIssues}`, + { + env: { + ...process.env, + ...integrationEnv, + SNYK_TOKEN: 'woof', + }, + }, + ); + + expect(stderr).toBe(''); + expect(code).toBe(EXIT_CODE_FAIL_WITH_ERROR); + }); }); - - expect(deepCodeServer.getRequests().length).toBe(0); - expect(localCodeEngineUrl.getRequests().length).toBeGreaterThan(0); - expect(stderr).toBe(''); - expect(stripAnsi(stdout)).toContain('✗ [Medium] Information Exposure'); - expect(code).toBe(EXIT_CODE_ACTION_NEEDED); - - await localCodeEngineUrl.close(() => {}); - }); - }); + }, + ); }); diff --git a/test/jest/acceptance/snyk-config/snyk-config-environment.spec.ts b/test/jest/acceptance/snyk-config/snyk-config-environment.spec.ts index ef4aaf31b1..aae2b74350 100644 --- a/test/jest/acceptance/snyk-config/snyk-config-environment.spec.ts +++ b/test/jest/acceptance/snyk-config/snyk-config-environment.spec.ts @@ -51,10 +51,10 @@ describe('snyk config 
environment', () => { }); it('fail with an invalid env alias', async () => { - const { - code, - stderr, - } = await runSnykCLI(`config environment randomEnvName`, { env: env }); + const { code, stderr } = await runSnykCLI( + `config environment randomEnvName`, + { env: env }, + ); expect(stderr).toEqual(''); expect(code).toEqual(2); }); diff --git a/test/jest/acceptance/snyk-container/container.spec.ts b/test/jest/acceptance/snyk-container/container.spec.ts index e9b2ee8b19..6721021181 100644 --- a/test/jest/acceptance/snyk-container/container.spec.ts +++ b/test/jest/acceptance/snyk-container/container.spec.ts @@ -40,8 +40,7 @@ describe('snyk container', () => { id: 'base-files@11.1+deb11u7', info: { name: 'base-files', - purl: - 'pkg:deb/debian/base-files@11.1%2Bdeb11u7?distro=debian-bullseye', + purl: 'pkg:deb/debian/base-files@11.1%2Bdeb11u7?distro=debian-bullseye', version: '11.1+deb11u7', }, }, @@ -57,8 +56,7 @@ describe('snyk container', () => { id: 'tzdata@2021a-1+deb11u10', info: { name: 'tzdata', - purl: - 'pkg:deb/debian/tzdata@2021a-1%2Bdeb11u10?distro=debian-bullseye', + purl: 'pkg:deb/debian/tzdata@2021a-1%2Bdeb11u10?distro=debian-bullseye', version: '2021a-1+deb11u10', }, }, @@ -268,11 +266,7 @@ DepGraph end`, }, meta: { org: 'test-org', isPublic: false }, }); - const { - code, - stdout, - stderr, - } = await runSnykCLIWithDebug( + const { code, stdout, stderr } = await runSnykCLIWithDebug( `container sbom --org=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format=spdx2.3+json ${TEST_DISTROLESS_STATIC_IMAGE}`, { env }, ); @@ -300,11 +294,7 @@ DepGraph end`, }, meta: { org: 'test-org', isPublic: false }, }); - const { - code, - stdout, - stderr, - } = await runSnykCLIWithDebug( + const { code, stdout, stderr } = await runSnykCLIWithDebug( `container sbom --org=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format=cyclonedx1.4+json ${TEST_DISTROLESS_STATIC_IMAGE}`, { env }, ); @@ -336,11 +326,7 @@ DepGraph end`, }, meta: { org: 'test-org', isPublic: false }, }); - 
const { - code, - stdout, - stderr, - } = await runSnykCLIWithDebug( + const { code, stdout, stderr } = await runSnykCLIWithDebug( `container sbom --org=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format=cyclonedx1.5+json ${TEST_DISTROLESS_STATIC_IMAGE}`, { env }, ); @@ -361,6 +347,38 @@ DepGraph end`, TEST_DISTROLESS_STATIC_IMAGE_DEPGRAPH.pkgs.length, ); }); + + it('should print sbom for image - cyclonedx 1.6', async () => { + // return a dep-graph fixture from `/test-dependencies` endpoint + server.setCustomResponse({ + result: { + issues: [], + issuesData: {}, + depGraphData: TEST_DISTROLESS_STATIC_IMAGE_DEPGRAPH, + }, + meta: { org: 'test-org', isPublic: false }, + }); + const { code, stdout, stderr } = await runSnykCLIWithDebug( + `container sbom --org=aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format=cyclonedx1.6+json ${TEST_DISTROLESS_STATIC_IMAGE}`, + { env }, + ); + + let sbom: any; + assertCliExitCode(code, 0, stderr); + + expect(() => { + sbom = JSON.parse(stdout); + }).not.toThrow(); + + expect(sbom.specVersion).toEqual('1.6'); + expect(sbom['$schema']).toEqual( + 'http://cyclonedx.org/schema/bom-1.6.schema.json', + ); + + expect(sbom.components).toHaveLength( + TEST_DISTROLESS_STATIC_IMAGE_DEPGRAPH.pkgs.length, + ); + }); }); describe('snyk container monitor --json output', () => { diff --git a/test/jest/acceptance/snyk-ignore/snyk-ignore.spec.ts b/test/jest/acceptance/snyk-ignore/snyk-ignore.spec.ts index 3a1707fb1e..d179c547d0 100644 --- a/test/jest/acceptance/snyk-ignore/snyk-ignore.spec.ts +++ b/test/jest/acceptance/snyk-ignore/snyk-ignore.spec.ts @@ -83,9 +83,7 @@ describe('snyk ignore', () => { it('create a policy file with exclude, using custom group', async () => { const project = await createProjectFromWorkspace('empty'); - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `ignore --file-path=**/deps/**/*.ts --file-path-group=code --policy-path=${project.path()}`, { cwd: project.path(), env: env }, ); @@ -128,9 +126,7 @@ 
describe('snyk ignore', () => { it('write a policy file for exclude by providing group, expiry and reason', async () => { const project = await createProjectFromWorkspace('empty'); - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `ignore --file-path=**/deps/**/*.ts --file-path-group=code --reason=unknown-reason --expiry=2099-12-24 --policy-path=${project.path()}`, { cwd: project.path(), env: env }, ); @@ -139,13 +135,15 @@ describe('snyk ignore', () => { const policy = await loadPolicy(project.path()); - expect(policy.exclude.code).toHaveLength(1); - expect(!!policy.exclude.code[0]['**/deps/**/*.ts']).toBeTruthy(); + expect(policy.exclude?.code).toHaveLength(1); + expect(!!policy.exclude?.code[0]['**/deps/**/*.ts']).toBeTruthy(); // Fake creation date - policy.exclude.code[0]['**/deps/**/*.ts'].created = new Date( - '2089-12-24T00:00:00.000Z', - ); + if (policy.exclude) { + policy.exclude.code[0]['**/deps/**/*.ts'].created = new Date( + '2089-12-24T00:00:00.000Z', + ); + } expect(policy.exclude).toMatchObject({ code: [ @@ -173,9 +171,7 @@ describe('snyk ignore', () => { code: ['**/deps/**/*.ts'], }); - const { - code, - } = await runSnykCLI( + const { code } = await runSnykCLI( `ignore --file-path=**/deps/**/*.ts --file-path-group=code --reason=unknown-reason --expiry=2099-12-24`, { cwd: project.path(), env: env }, ); @@ -184,13 +180,15 @@ describe('snyk ignore', () => { const policyAfter = await loadPolicy(project.path()); - expect(policyAfter.exclude.code).toHaveLength(1); - expect(!!policyAfter.exclude.code[0]['**/deps/**/*.ts']).toBeTruthy(); + expect(policyAfter.exclude?.code).toHaveLength(1); + expect(!!policyAfter.exclude?.code[0]['**/deps/**/*.ts']).toBeTruthy(); // Fake creation date - policyAfter.exclude.code[0]['**/deps/**/*.ts'].created = new Date( - '2089-12-24T00:00:00.000Z', - ); + if (policyAfter.exclude) { + policyAfter.exclude.code[0]['**/deps/**/*.ts'].created = new Date( + '2089-12-24T00:00:00.000Z', + ); + } 
expect(policyAfter.exclude).toMatchObject({ code: [ diff --git a/test/jest/acceptance/snyk-sbom-test/all-projects.spec.ts b/test/jest/acceptance/snyk-sbom-test/all-projects.spec.ts index 3437539705..0b935b29d9 100644 --- a/test/jest/acceptance/snyk-sbom-test/all-projects.spec.ts +++ b/test/jest/acceptance/snyk-sbom-test/all-projects.spec.ts @@ -44,11 +44,7 @@ describe('snyk sbom test (mocked server only)', () => { 'npm-sbom-cdx15.json', ); - const { - code, - stdout, - stderr, - } = await runSnykCLI( + const { code, stdout, stderr } = await runSnykCLI( `sbom test --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --experimental --file ${fileToTest}`, { env }, ); @@ -82,11 +78,7 @@ describe('snyk sbom test (mocked server only)', () => { 'npm-sbom-cdx15.json', ); - const { - code, - stdout, - stderr, - } = await runSnykCLI( + const { code, stdout, stderr } = await runSnykCLI( `sbom test --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --experimental --file ${fileToTest} --json`, { env }, ); @@ -119,10 +111,7 @@ describe('snyk sbom test (mocked server only)', () => { 'npm-sbom-cdx15.json', ); - const { - stdout, - stderr, - } = await runSnykCLI( + const { stdout, stderr } = await runSnykCLI( `sbom test --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --file ${fileToTest}`, { env }, ); @@ -135,10 +124,7 @@ describe('snyk sbom test (mocked server only)', () => { }); test('missing file flag', async () => { - const { - stdout, - stderr, - } = await runSnykCLI( + const { stdout, stderr } = await runSnykCLI( `sbom test --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --experimental`, { env }, ); @@ -152,11 +138,7 @@ describe('snyk sbom test (mocked server only)', () => { test('bad SBOM input', async () => { const fileToTest = path.resolve(getFixturePath('sbom'), 'bad-sbom.json'); - const { - code, - stdout, - stderr, - } = await runSnykCLI( + const { code, stdout, stderr } = await runSnykCLI( `sbom test --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --experimental --file ${fileToTest}`, { env }, ); diff 
--git a/test/jest/acceptance/snyk-sbom/sbom.spec.ts b/test/jest/acceptance/snyk-sbom/sbom.spec.ts index 23277dc87c..13814ef1d1 100644 --- a/test/jest/acceptance/snyk-sbom/sbom.spec.ts +++ b/test/jest/acceptance/snyk-sbom/sbom.spec.ts @@ -174,6 +174,31 @@ describe('snyk sbom (mocked server only)', () => { ); }); + test('`sbom` generates an SBOM for a single project - CycloneDX 1.6', async () => { + const project = await createProjectFromWorkspace('npm-package'); + + const { code, stdout } = await runSnykCLI( + `sbom --org aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee --format cyclonedx1.6+json --debug`, + { + cwd: project.path(), + env, + }, + ); + let bom: any; + + expect(code).toEqual(0); + expect(() => { + bom = JSON.parse(stdout); + }).not.toThrow(); + + expect(bom.specVersion).toEqual('1.6'); + expect(bom['$schema']).toEqual( + 'http://cyclonedx.org/schema/bom-1.6.schema.json', + ); + expect(bom.metadata.component.name).toEqual('npm-package'); + expect(bom.components).toHaveLength(3); + }); + test('`sbom` retains the exit error code of the underlying SCA process', async () => { const project = await createProject('empty'); diff --git a/test/jest/acceptance/snyk-test/all-projects.spec.ts b/test/jest/acceptance/snyk-test/all-projects.spec.ts index fdc4fd6433..0a19fd9c39 100644 --- a/test/jest/acceptance/snyk-test/all-projects.spec.ts +++ b/test/jest/acceptance/snyk-test/all-projects.spec.ts @@ -198,6 +198,48 @@ describe('snyk test --all-projects (mocked server only)', () => { expect(stderr).toEqual(''); }); + test('`test mono-repo-nested --all-projects` defaults to 4 max depth', async () => { + const project = await createProjectFromWorkspace('mono-repo-nested'); + + const { code, stdout, stderr } = await runSnykCLI('test --all-projects', { + cwd: project.path(), + env, + }); + + const backendRequests = server.getRequests().filter((req: any) => { + return req.url.includes('/api/v1/test'); + }); + + expect(backendRequests).toHaveLength(4); + backendRequests.forEach((req: 
any) => { + expect(req.method).toEqual('POST'); + expect(req.headers['x-snyk-cli-version']).not.toBeUndefined(); + expect(req.url).toMatch('/api/v1/test'); + }); + + expect(code).toEqual(0); + + const dirSeparator = process.platform === 'win32' ? '\\' : '/'; + + expect(stdout).toMatch('Target file: package-lock.json'); + expect(stdout).toMatch( + `Target file: level-1${dirSeparator}package.json`, + ); + expect(stdout).toMatch( + `Target file: level-1${dirSeparator}level-2${dirSeparator}Gemfile.lock`, + ); + expect(stdout).toMatch( + `Target file: level-1${dirSeparator}level-2${dirSeparator}level-3${dirSeparator}package-lock.json`, + ); + expect(stdout).not.toMatch( + `level-1${dirSeparator}level-2${dirSeparator}level-3${dirSeparator}level-4${dirSeparator}level-5${dirSeparator}package-lock.json`, + ); + expect(stdout).not.toMatch( + `level-1${dirSeparator}level-2${dirSeparator}level-3${dirSeparator}level-4${dirSeparator}level-5${dirSeparator}level-6${dirSeparator}Gemfile.lock`, + ); + expect(stderr).toBe(''); + }); + test('`test empty --all-projects`', async () => { const project = await createProjectFromWorkspace('empty'); diff --git a/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts b/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts index 044d44aefb..c5acd5de11 100644 --- a/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts +++ b/test/jest/acceptance/snyk-test/basic-test-all-languages.spec.ts @@ -80,7 +80,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { const project = await createProjectFromWorkspace('pip-app'); let pythonCommand = 'python'; - await runCommand(pythonCommand, ['--version']).catch(function() { + await runCommand(pythonCommand, ['--version']).catch(function () { pythonCommand = 'python3'; }); @@ -126,7 +126,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { const project = await createProjectFromWorkspace('pip-app'); let wrongPythonCommand = 
'pthon'; - await runCommand(wrongPythonCommand, ['--version']).catch(function() { + await runCommand(wrongPythonCommand, ['--version']).catch(function () { wrongPythonCommand = 'pthon3'; }); @@ -143,7 +143,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { const project = await createProjectFromWorkspace('pipenv-app'); let pythonCommand = 'python'; - await runCommand(pythonCommand, ['--version']).catch(function() { + await runCommand(pythonCommand, ['--version']).catch(function () { pythonCommand = 'python3'; }); @@ -240,7 +240,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { 'run `snyk test` on a nuget project using v2 dotnet runtime resolution logic for $fixture', async ({ fixture, targetFile }) => { let prerequisite = await runCommand('dotnet', ['--version']).catch( - function() { + function () { return { code: 1, stderr: '', stdout: '' }; }, ); @@ -292,7 +292,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { test('run `snyk test` on a nuget project using v2 dotnet runtime resolution logic with a custom output path', async () => { let prerequisite = await runCommand('dotnet', ['--version']).catch( - function() { + function () { return { code: 1, stderr: '', stdout: '' }; }, ); @@ -349,7 +349,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { 'run `snyk test` on a nuget project using v2 dotnet runtime resolution logic with explicit target framework $targetFramework', async ({ targetFramework }) => { let prerequisite = await runCommand('dotnet', ['--version']).catch( - function() { + function () { return { code: 1, stderr: '', stdout: '' }; }, ); @@ -436,7 +436,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { test('run `snyk test` on a hex project', async () => { const prerequisite = await runCommand('mix', ['--version']).catch( - function() { + function () { return { code: 1, stderr: '', stdout: '' }; 
}, ); @@ -456,7 +456,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { test('run `snyk test` on a composer project', async () => { const prerequisite = await runCommand('composer', ['--version']).catch( - function() { + function () { return { code: 1, stderr: '', stdout: '' }; }, ); @@ -476,7 +476,7 @@ describe('`snyk test` of basic projects for each language/ecosystem', () => { test('run `snyk test` on a sbt project', async () => { const prerequisite = await runCommand('sbt', ['--version']).catch( - function() { + function () { return { code: 1, stderr: '', stdout: '' }; }, ); diff --git a/test/jest/acceptance/woof.spec.ts b/test/jest/acceptance/woof.spec.ts index 9fb86982a6..27fc7cb05a 100644 --- a/test/jest/acceptance/woof.spec.ts +++ b/test/jest/acceptance/woof.spec.ts @@ -33,12 +33,15 @@ describe('woof', () => { }); // test each supported language code - test.each(languages)('Woofs in %s', async ({ langCode, expectedWoof }) => { - const { stdout, code, stderr } = await runSnykCLI( - `woof --language=${langCode}`, - ); - expect(stdout).toContain(expectedWoof); - expect(code).toBe(0); - expect(stderr).toBe(''); - }); + test.concurrent.each(languages)( + 'Woofs in %s', + async ({ langCode, expectedWoof }) => { + const { stdout, code, stderr } = await runSnykCLI( + `woof --language=${langCode}`, + ); + expect(stdout).toContain(expectedWoof); + expect(code).toBe(0); + expect(stderr).toBe(''); + }, + ); }); diff --git a/test/jest/unit/cli/commands/test/iac/v2/index.spec.ts b/test/jest/unit/cli/commands/test/iac/v2/index.spec.ts index 2426fabd53..b58ad34eab 100644 --- a/test/jest/unit/cli/commands/test/iac/v2/index.spec.ts +++ b/test/jest/unit/cli/commands/test/iac/v2/index.spec.ts @@ -138,8 +138,7 @@ describe('test', () => { "Test Failures\n\n The Snyk CLI couldn't find any valid IaC configuration files to scan\n Path: invalid_file.txt", formattedUserMessage: "Test Failures\n\n The Snyk CLI couldn't find any valid IaC configuration 
files to scan\n Path: invalid_file.txt", - json: - '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', + json: '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', jsonStringifiedResults: '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', sarifStringifiedResults: `{\n "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",\n "version": "2.1.0",\n "runs": [\n {\n "originalUriBaseIds": {\n "PROJECTROOT": {\n "uri": "${ @@ -190,8 +189,7 @@ describe('test', () => { ), jsonStringifiedResults: '[\n {\n "ok": false,\n "code": 2114,\n "error": "no loadable input: path/to/test",\n "path": "path/to/test"\n }\n]', - json: - '[\n {\n "ok": false,\n "code": 2114,\n "error": "no loadable input: path/to/test",\n "path": "path/to/test"\n }\n]', + json: '[\n {\n "ok": false,\n "code": 2114,\n "error": "no loadable input: path/to/test",\n "path": "path/to/test"\n }\n]', }), ); }); @@ -255,8 +253,7 @@ describe('test', () => { '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', formattedUserMessage: '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', - json: - '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', + json: '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', jsonStringifiedResults: '[\n {\n "ok": false,\n "code": 2114,\n "error": "",\n "path": "invalid_file.txt"\n }\n]', sarifStringifiedResults: `{\n "$schema": "https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json",\n "version": "2.1.0",\n "runs": [\n {\n "originalUriBaseIds": {\n "PROJECTROOT": {\n "uri": "${ @@ -310,8 +307,7 @@ describe('test', () => { ), jsonStringifiedResults: '[\n {\n "ok": false,\n "code": 2114,\n "error": "no loadable input: 
path/to/test",\n "path": "path/to/test"\n }\n]', - json: - '[\n {\n "ok": false,\n "code": 2114,\n "error": "no loadable input: path/to/test",\n "path": "path/to/test"\n }\n]', + json: '[\n {\n "ok": false,\n "code": 2114,\n "error": "no loadable input: path/to/test",\n "path": "path/to/test"\n }\n]', }), ); }); diff --git a/test/jest/unit/cli/commands/test/set-default-test-options.spec.ts b/test/jest/unit/cli/commands/test/set-default-test-options.spec.ts new file mode 100644 index 0000000000..86b8e935dc --- /dev/null +++ b/test/jest/unit/cli/commands/test/set-default-test-options.spec.ts @@ -0,0 +1,24 @@ +import { setDefaultTestOptions } from '../../../../../../src/cli/commands/test/set-default-test-options'; + +describe('setDefaultTestOptions', () => { + it('default options', () => { + const options = {}; + const result = setDefaultTestOptions(options as any); + expect(result.showVulnPaths).toEqual('some'); + expect(result.maxVulnPaths).toBeUndefined(); + }); + + it('explicit max-vulnerable-paths', () => { + const options = { 'max-vulnerable-paths': 42 }; + const result = setDefaultTestOptions(options as any); + expect(result.showVulnPaths).toEqual('some'); + expect(result.maxVulnPaths).toEqual(42); + }); + + it('explicit show-vulnerable-paths', () => { + const options = { 'show-vulnerable-paths': 'all' }; + const result = setDefaultTestOptions(options as any); + expect(result.showVulnPaths).toEqual('all'); + expect(result.maxVulnPaths).toBeUndefined(); + }); +}); diff --git a/test/jest/unit/cli/commands/types.spec.ts b/test/jest/unit/cli/commands/types.spec.ts index 7ea68e10c3..18a9786460 100644 --- a/test/jest/unit/cli/commands/types.spec.ts +++ b/test/jest/unit/cli/commands/types.spec.ts @@ -14,27 +14,26 @@ test('createHumanReadableTestCommandResult', () => { }); test('createJsonTestCommandResult', () => { - const result = TestCommandResult.createJsonTestCommandResult( - '{ json result}', - ); + const result = + 
TestCommandResult.createJsonTestCommandResult('{ json result}'); expect(result.toString()).toEqual('{ json result}'); expect(result.getDisplayResults()).toEqual('{ json result}'); expect(result.getJsonResult()).toEqual('{ json result}'); }); test('CommandResult is a HumanReadableTestCommandResult', () => { - const result: CommandResult = TestCommandResult.createHumanReadableTestCommandResult( - 'hr result', - '{ json result}', - ); + const result: CommandResult = + TestCommandResult.createHumanReadableTestCommandResult( + 'hr result', + '{ json result}', + ); expect(result.toString()).toEqual('hr result'); expect(result.getDisplayResults()).toEqual('hr result'); }); test('CommandResult is a JsonTestCommandResult', () => { - const result: CommandResult = TestCommandResult.createJsonTestCommandResult( - '{ json result}', - ); + const result: CommandResult = + TestCommandResult.createJsonTestCommandResult('{ json result}'); expect(result.toString()).toEqual('{ json result}'); expect(result.getDisplayResults()).toEqual('{ json result}'); }); diff --git a/test/jest/unit/dev-count-analysis.spec.ts b/test/jest/unit/dev-count-analysis.spec.ts index c9c994364d..babb90b390 100644 --- a/test/jest/unit/dev-count-analysis.spec.ts +++ b/test/jest/unit/dev-count-analysis.spec.ts @@ -25,10 +25,11 @@ describe('cli dev count via git log analysis', () => { const timestampEpochSecondsEndOfPeriod = Math.floor( TIMESTAMP_TO_TEST / 1000, ); - const timestampEpochSecondsStartOfPeriod = getTimestampStartOfContributingDevTimeframe( - new Date(TIMESTAMP_TO_TEST), - 10, - ); + const timestampEpochSecondsStartOfPeriod = + getTimestampStartOfContributingDevTimeframe( + new Date(TIMESTAMP_TO_TEST), + 10, + ); const withMergesGitLogCommand = `git --no-pager log --pretty=tformat:"%H${SERIOUS_DELIMITER}%an${SERIOUS_DELIMITER}%ae${SERIOUS_DELIMITER}%aI${SERIOUS_DELIMITER}%s" --after="${timestampEpochSecondsStartOfPeriod}" --until="${timestampEpochSecondsEndOfPeriod}" 
--max-count=${MAX_COMMITS_IN_GIT_LOG}`; const withMergesGitLogStdout: string = await execShell( @@ -123,15 +124,13 @@ describe('cli dev count via git log analysis', () => { expect(uniqueAuthors.has('someemail-1@somedomain.com')).toBeTruthy(); expect(uniqueAuthors.has('someemail-2@somedomain.com')).toBeTruthy(); - const mostRecentCommitTimestampSomeEmail1 = stats.getMostRecentCommitTimestamp( - 'someemail-1@somedomain.com', - ); + const mostRecentCommitTimestampSomeEmail1 = + stats.getMostRecentCommitTimestamp('someemail-1@somedomain.com'); expect(mostRecentCommitTimestampSomeEmail1).toEqual( '2020-02-06T11:43:11+00:00', ); - const mostRecentCommitTimestampSomeEmail2 = stats.getMostRecentCommitTimestamp( - 'someemail-2@somedomain.com', - ); + const mostRecentCommitTimestampSomeEmail2 = + stats.getMostRecentCommitTimestamp('someemail-2@somedomain.com'); expect(mostRecentCommitTimestampSomeEmail2).toEqual( '2020-02-02T23:31:13+02:00', ); @@ -183,15 +182,13 @@ describe('cli dev count via git log analysis', () => { expect(uniqueAuthors).toContain('someemail-1@somedomain.com'); expect(uniqueAuthors).toContain('someemail-2@somedomain.com'); - const mostRecentCommitTimestampSomeEmail1 = stats.getMostRecentCommitTimestamp( - 'someemail-1@somedomain.com', - ); + const mostRecentCommitTimestampSomeEmail1 = + stats.getMostRecentCommitTimestamp('someemail-1@somedomain.com'); expect(mostRecentCommitTimestampSomeEmail1).toEqual( '2020-02-06T11:43:11+00:00', ); - const mostRecentCommitTimestampSomeEmail2 = stats.getMostRecentCommitTimestamp( - 'someemail-2@somedomain.com', - ); + const mostRecentCommitTimestampSomeEmail2 = + stats.getMostRecentCommitTimestamp('someemail-2@somedomain.com'); expect(mostRecentCommitTimestampSomeEmail2).toEqual( '2020-02-02T23:31:13+02:00', ); diff --git a/test/jest/unit/iac/cli-share-results.fixtures.ts b/test/jest/unit/iac/cli-share-results.fixtures.ts index 8fc4153008..9c9943875a 100644 --- a/test/jest/unit/iac/cli-share-results.fixtures.ts +++ 
b/test/jest/unit/iac/cli-share-results.fixtures.ts @@ -120,6 +120,7 @@ export const expectedEnvelopeFormatterResults = [ name: 'projectA', policy: '', target: { + name: undefined, remoteUrl: 'http://github.com/snyk/cli.git', }, targetReference: undefined, @@ -168,12 +169,12 @@ export const expectedEnvelopeFormatterResults = [ }, ]; -export const expectedEnvelopeFormatterResultsWithPolicy = expectedEnvelopeFormatterResults.map( - (result) => { +export const expectedEnvelopeFormatterResultsWithPolicy = + expectedEnvelopeFormatterResults.map((result) => { return { ...result, policy: `# Snyk (https://snyk.io) policy file, patches or ignores known vulnerabilities. -version: v1.25.0 +version: v1.25.1 # ignores vulnerabilities until expiry date; change duration by modifying expiry date ignore: SNYK-CC-TF-4: @@ -182,8 +183,7 @@ ignore: patch: {} `, }; - }, -); + }); export const createEnvelopeFormatterResultsWithTargetRef = ( targetReference: string, diff --git a/test/jest/unit/iac/cli-share-results.spec.ts b/test/jest/unit/iac/cli-share-results.spec.ts index 422b16b37c..f0e63d29b1 100644 --- a/test/jest/unit/iac/cli-share-results.spec.ts +++ b/test/jest/unit/iac/cli-share-results.spec.ts @@ -7,13 +7,15 @@ import { } from './cli-share-results.fixtures'; import * as request from '../../../../src/lib/request/request'; import * as envelopeFormatters from '../../../../src/lib/iac/envelope-formatters'; -import { Policy } from '../../../../src/lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import * as snykPolicyLib from 'snyk-policy'; describe('CLI Share Results', () => { let snykPolicy: Policy; let requestSpy: jest.SpiedFunction; - let envelopeFormattersSpy: jest.SpiedFunction; + let envelopeFormattersSpy: jest.SpiedFunction< + typeof envelopeFormatters.convertIacResultToScanResult + >; beforeAll(async () => { snykPolicy = await snykPolicyLib.load('test/jest/unit/iac/fixtures'); @@ -52,10 +54,8 @@ describe('CLI Share Results', () => { 
expect(firstCall[0]).toEqual(scanResults[0]); expect(secondCall[0]).toEqual(scanResults[1]); - const [ - firstCallResult, - secondCallResult, - ] = envelopeFormattersSpy.mock.results; + const [firstCallResult, secondCallResult] = + envelopeFormattersSpy.mock.results; expect(firstCallResult.value).toEqual(expectedEnvelopeFormatterResults[0]); expect(secondCallResult.value).toEqual(expectedEnvelopeFormatterResults[1]); @@ -79,10 +79,8 @@ describe('CLI Share Results', () => { expect(firstCall[0]).toEqual(scanResults[0]); expect(secondCall[0]).toEqual(scanResults[1]); - const [ - firstCallResult, - secondCallResult, - ] = envelopeFormattersSpy.mock.results; + const [firstCallResult, secondCallResult] = + envelopeFormattersSpy.mock.results; expect(firstCallResult.value).toEqual( expectedEnvelopeFormatterResultsWithPolicy[0], @@ -95,9 +93,8 @@ describe('CLI Share Results', () => { describe('when given a target reference', () => { it("should include it in the Envelope's ScanResult interface", async () => { const testTargetRef = 'test-target-ref'; - const expectedEnvelopeFormatterResults = createEnvelopeFormatterResultsWithTargetRef( - testTargetRef, - ); + const expectedEnvelopeFormatterResults = + createEnvelopeFormatterResultsWithTargetRef(testTargetRef); await shareResults({ results: scanResults, @@ -118,10 +115,8 @@ describe('CLI Share Results', () => { expect(firstCall[0]).toEqual(scanResults[0]); expect(secondCall[0]).toEqual(scanResults[1]); - const [ - firstCallResult, - secondCallResult, - ] = envelopeFormattersSpy.mock.results; + const [firstCallResult, secondCallResult] = + envelopeFormattersSpy.mock.results; expect(firstCallResult.value).toEqual( expectedEnvelopeFormatterResults[0], ); diff --git a/test/jest/unit/iac/directory-loader.spec.ts b/test/jest/unit/iac/directory-loader.spec.ts index cbb35f17f6..09051e9791 100644 --- a/test/jest/unit/iac/directory-loader.spec.ts +++ b/test/jest/unit/iac/directory-loader.spec.ts @@ -114,13 +114,11 @@ 
describe('getAllDirectoriesForPath', () => { mockFs({ [level1Directory]: { [path.basename(level2Directory)]: { - [path.basename( - level2FileStub.filePath, - )]: level2FileStub.fileContent, + [path.basename(level2FileStub.filePath)]: + level2FileStub.fileContent, [path.basename(level3Directory)]: { - [path.basename( - level3FileStub.filePath, - )]: level3FileStub.fileContent, + [path.basename(level3FileStub.filePath)]: + level3FileStub.fileContent, }, }, }, @@ -130,9 +128,8 @@ describe('getAllDirectoriesForPath', () => { describe('with 1 directory', () => { describe('with 2 directories', () => { it('returns the files at level 2', () => { - const directoryFilePaths = getAllDirectoriesForPath( - level1Directory, - ); + const directoryFilePaths = + getAllDirectoriesForPath(level1Directory); const level2Dir = path.join( level1Directory, path.basename(level2Directory), diff --git a/test/jest/unit/iac/file-parser.kubernetes.fixtures.ts b/test/jest/unit/iac/file-parser.kubernetes.fixtures.ts index f70688caf1..c163d30bec 100644 --- a/test/jest/unit/iac/file-parser.kubernetes.fixtures.ts +++ b/test/jest/unit/iac/file-parser.kubernetes.fixtures.ts @@ -213,13 +213,14 @@ export const insufficientIndentationYamlErrorFileDataStub: IacFileData = { filePath: 'dont-care', fileType: 'yml', }; -export const expectedInsufficientIndentationYamlErrorFileParsingResult: IacFileParsed = { - ...insufficientIndentationYamlErrorFileDataStub, - docId: 0, - projectType: IacProjectType.CLOUDFORMATION, - engineType: EngineType.CloudFormation, - jsonContent: yamlWithInsufficientIndentationFileJSON, -}; +export const expectedInsufficientIndentationYamlErrorFileParsingResult: IacFileParsed = + { + ...insufficientIndentationYamlErrorFileDataStub, + docId: 0, + projectType: IacProjectType.CLOUDFORMATION, + engineType: EngineType.CloudFormation, + jsonContent: yamlWithInsufficientIndentationFileJSON, + }; export const expectedKubernetesYamlParsingResult: IacFileParsed = { ...kubernetesYamlFileDataStub, 
diff --git a/test/jest/unit/iac/file-parser.spec.ts b/test/jest/unit/iac/file-parser.spec.ts index 1f86d74455..71b1b7e9fc 100644 --- a/test/jest/unit/iac/file-parser.spec.ts +++ b/test/jest/unit/iac/file-parser.spec.ts @@ -144,7 +144,8 @@ describe('parseFiles', () => { [ { fileStub: insufficientIndentationYamlErrorFileDataStub, - expectedParsingResult: expectedInsufficientIndentationYamlErrorFileParsingResult, + expectedParsingResult: + expectedInsufficientIndentationYamlErrorFileParsingResult, }, ], ])( diff --git a/test/jest/unit/iac/file-scanner.spec.ts b/test/jest/unit/iac/file-scanner.spec.ts index 3ce4263c5d..7ccf601037 100644 --- a/test/jest/unit/iac/file-scanner.spec.ts +++ b/test/jest/unit/iac/file-scanner.spec.ts @@ -152,9 +152,8 @@ describe('validateResultFromCustomRules', () => { }; it('does not filter out valid policies', () => { - const { validatedResult, invalidIssues } = validateResultFromCustomRules( - result, - ); + const { validatedResult, invalidIssues } = + validateResultFromCustomRules(result); expect(validatedResult.violatedPolicies).toEqual( expect.arrayContaining([ expect.objectContaining({ publicId: 'CUSTOM-RULE-VALID' }), @@ -164,9 +163,8 @@ describe('validateResultFromCustomRules', () => { }); it('filters out policies with invalid severity', () => { - const { validatedResult, invalidIssues } = validateResultFromCustomRules( - result, - ); + const { validatedResult, invalidIssues } = + validateResultFromCustomRules(result); expect(validatedResult.violatedPolicies).not.toEqual( expect.arrayContaining([ expect.objectContaining({ publicId: 'CUSTOM-RULE-INVALID-SEVERITY' }), @@ -184,9 +182,8 @@ describe('validateResultFromCustomRules', () => { }); it('filters out policies with lowercase publicId', () => { - const { validatedResult, invalidIssues } = validateResultFromCustomRules( - result, - ); + const { validatedResult, invalidIssues } = + validateResultFromCustomRules(result); expect(validatedResult.violatedPolicies).not.toEqual( 
expect.arrayContaining([ expect.objectContaining({ @@ -206,9 +203,8 @@ describe('validateResultFromCustomRules', () => { }); it('filters out policies with conflicting publicId', () => { - const { validatedResult, invalidIssues } = validateResultFromCustomRules( - result, - ); + const { validatedResult, invalidIssues } = + validateResultFromCustomRules(result); expect(validatedResult.violatedPolicies).not.toEqual( expect.arrayContaining([ expect.objectContaining({ publicId: 'SNYK-CC-CUSTOM-RULE-INVALID' }), diff --git a/test/jest/unit/iac/process-results/policy.spec.ts b/test/jest/unit/iac/process-results/policy.spec.ts index 9c9551ce3b..c4617fdc2b 100644 --- a/test/jest/unit/iac/process-results/policy.spec.ts +++ b/test/jest/unit/iac/process-results/policy.spec.ts @@ -16,7 +16,7 @@ async function filterFixture(policyName: string) { // The policy library modifies its input. In order to write meaningful // assertions, deep-clone the original fixture. - const filtered = filterIgnoredIssues(policy, cloneDeep(fixture)); + const filtered = filterIgnoredIssues(policy || undefined, cloneDeep(fixture)); return { fixture: fixture, diff --git a/test/jest/unit/iac/results-formatter.fixtures.ts b/test/jest/unit/iac/results-formatter.fixtures.ts index 72644890a7..ad9a8c17a2 100644 --- a/test/jest/unit/iac/results-formatter.fixtures.ts +++ b/test/jest/unit/iac/results-formatter.fixtures.ts @@ -179,18 +179,16 @@ export const expectedFormattedResultsWithLineNumber = generateFormattedResults({ withLineNumber: true, }, }); -export const expectedFormattedResultsWithoutLineNumber = generateFormattedResults( - { +export const expectedFormattedResultsWithoutLineNumber = + generateFormattedResults({ cloudConfigResultsOptions: { withLineNumber: false, }, - }, -); -export const expectedFormattedResultsGeneratedByCustomRules = generateFormattedResults( - { + }); +export const expectedFormattedResultsGeneratedByCustomRules = + generateFormattedResults({ cloudConfigResultsOptions: { 
isGeneratedByCustomRule: true, }, packageManager: IacProjectType.CUSTOM, - }, -); + }); diff --git a/test/jest/unit/iac/rules/oci-pull.spec.ts b/test/jest/unit/iac/rules/oci-pull.spec.ts index 2bf3e4da75..efa07f9f59 100644 --- a/test/jest/unit/iac/rules/oci-pull.spec.ts +++ b/test/jest/unit/iac/rules/oci-pull.spec.ts @@ -66,25 +66,25 @@ describe('extractOCIRegistryURLComponents', () => { }); }); - it('throws an error if a URL with an empty registry host is provided', function() { + it('throws an error if a URL with an empty registry host is provided', function () { expect(() => { extractOCIRegistryURLComponents('https:///repository:0.2.0'); }).toThrow(InvalidRemoteRegistryURLError); }); - it('throws an error if a URL without a path is provided', function() { + it('throws an error if a URL without a path is provided', function () { expect(() => { extractOCIRegistryURLComponents('https://registry'); }).toThrow(InvalidRemoteRegistryURLError); }); - it('throws an error if a URL with an empty path is provided', function() { + it('throws an error if a URL with an empty path is provided', function () { expect(() => { extractOCIRegistryURLComponents('https://registry/'); }).toThrow(InvalidRemoteRegistryURLError); }); - it('throws an error if a URL with an empty repository name is provided', function() { + it('throws an error if a URL with an empty repository name is provided', function () { expect(() => { extractOCIRegistryURLComponents('https://registry/:'); }).toThrow(InvalidRemoteRegistryURLError); diff --git a/test/jest/unit/iac/service-mappings.spec.ts b/test/jest/unit/iac/service-mappings.spec.ts index b0c0ddc3dd..f4631b59e4 100644 --- a/test/jest/unit/iac/service-mappings.spec.ts +++ b/test/jest/unit/iac/service-mappings.spec.ts @@ -6,27 +6,27 @@ import { verifyServiceMappingExists, } from '../../../../src/lib/iac/service-mappings'; -describe('service-mappings', function() { - describe('validation', function() { - it('throws an error when an unknown service is 
used', function() { +describe('service-mappings', function () { + describe('validation', function () { + it('throws an error when an unknown service is used', function () { expect(() => { verifyServiceMappingExists(['no-such-service']); }).toThrow(InvalidServiceError); }); - it('throws an error when an empty service list is used', function() { + it('throws an error when an empty service list is used', function () { expect(() => { verifyServiceMappingExists([]); }).toThrow(InvalidServiceError); }); - it('does not throw an error when a known service is used', function() { + it('does not throw an error when a known service is used', function () { expect(() => { verifyServiceMappingExists(Array.from(services2resources.keys())); }).not.toThrow(InvalidServiceError); }); }); - describe('ignore pattern creation', function() { - it('should create the correct pattern', function() { + describe('ignore pattern creation', function () { + it('should create the correct pattern', function () { const service = Array.from(services2resources.keys())[0]; const pattern = createIgnorePattern([service]); let expected = '*'; @@ -36,7 +36,7 @@ describe('service-mappings', function() { expect(pattern).toBe(expected); }); - it('should not include the same ignore pattern replicated multiple times', function() { + it('should not include the same ignore pattern replicated multiple times', function () { const services = new Map>([ ['service1', ['duplicate']], ['service2', ['duplicate']], diff --git a/test/jest/unit/iac/url-utils.spec.ts b/test/jest/unit/iac/url-utils.spec.ts index 8adc32341a..c5b8874e18 100644 --- a/test/jest/unit/iac/url-utils.spec.ts +++ b/test/jest/unit/iac/url-utils.spec.ts @@ -1,14 +1,14 @@ import { isValidUrl } from '../../../../src/cli/commands/test/iac/local-execution/url-utils'; -describe('url-utils.ts', function() { - describe('isValidUrl', function() { - describe('Given a valid URL', function() { - describe('With a protocol - it returns true', function() { 
+describe('url-utils.ts', function () { + describe('isValidUrl', function () { + describe('Given a valid URL', function () { + describe('With a protocol - it returns true', function () { it.each([ 'https://valid.io/url', 'https://valid.io/url:latest', 'https://valid.io/url:0.1.0', - ])('%s', function(urlStr) { + ])('%s', function (urlStr) { // Act const result = isValidUrl(urlStr); @@ -17,10 +17,10 @@ describe('url-utils.ts', function() { }); }); - describe('Without a protocol - it returns true', function() { + describe('Without a protocol - it returns true', function () { it.each(['valid.io/url', 'valid.io/url:latest', 'valid.io/url:0.1.0'])( '%s', - function(urlStr) { + function (urlStr) { // Act const result = isValidUrl(urlStr); @@ -31,13 +31,13 @@ describe('url-utils.ts', function() { }); }); - describe('When given an invalid URL', function() { - describe('With a protocol - it returns false', function() { + describe('When given an invalid URL', function () { + describe('With a protocol - it returns false', function () { it.each([ 'http://an/invalid/url', 'https://an-invalid-url', 'http://:an_invalid/url', - ])('%s', function(urlStr: string) { + ])('%s', function (urlStr: string) { const result = isValidUrl(urlStr); // Assert @@ -45,10 +45,10 @@ describe('url-utils.ts', function() { }); }); - describe('Without a protocol - it returns false', function() { + describe('Without a protocol - it returns false', function () { it.each(['an/invalid/url', 'an-invalid-url', ':an_invalid/url'])( '%s', - function(urlStr: string) { + function (urlStr: string) { const result = isValidUrl(urlStr); // Assert diff --git a/test/jest/unit/lib/analytics/utils.ts b/test/jest/unit/lib/analytics/utils.ts index 35119a9a09..eaab89e703 100644 --- a/test/jest/unit/lib/analytics/utils.ts +++ b/test/jest/unit/lib/analytics/utils.ts @@ -6,10 +6,10 @@ import { ArgsOptions } from '../../../../../src/cli/args'; * @returns a ArgsOptions[] with just the stuff we need for the tests. 
*/ export function argsFrom(args: { [key: string]: string }): ArgsOptions[] { - const fullArgs = ([ + const fullArgs = [ { ...args, }, - ] as any) as ArgsOptions[]; + ] as any as ArgsOptions[]; return fullArgs; } diff --git a/test/jest/unit/lib/ecosystems/policy.spec.ts b/test/jest/unit/lib/ecosystems/policy.spec.ts index 4b72c031b3..142dab2b5c 100644 --- a/test/jest/unit/lib/ecosystems/policy.spec.ts +++ b/test/jest/unit/lib/ecosystems/policy.spec.ts @@ -1,5 +1,5 @@ import { Issue, IssuesData } from '../../../../../src/lib/ecosystems/types'; -import { Policy } from '../../../../../src/lib/policy/find-and-load-policy'; +import * as snykPolicyLib from 'snyk-policy'; import { SEVERITY } from '@snyk/fix/dist/types'; import { filterIgnoredIssues } from '../../../../../src/lib/ecosystems/policy'; @@ -9,7 +9,7 @@ describe('filterIgnoredIssues fn', () => { const getFutureDate = () => new Date(Date.now() + ONE_HOUR_MS); const getPastDate = () => new Date(Date.now() - ONE_HOUR_MS); - it('should filter the not-expired ignored issues', () => { + it('should filter the not-expired ignored issues', async () => { const issues: Issue[] = [ { pkgName: 'https://foo.bar|test1', @@ -42,24 +42,24 @@ describe('filterIgnoredIssues fn', () => { id: 'SNYK-TEST-2', }, }; - const policy = { + const policy = await snykPolicyLib.loadFromText(`{ ignore: { 'SNYK-TEST-1': [ { '*': { reason: 'None Given', - created: getCurrentDate(), - expires: getFutureDate(), + created: ${getCurrentDate()}, + expires: ${getFutureDate()}, }, }, ], }, - }; + }`); const [filteredIssues, filteredIssuesData] = filterIgnoredIssues( issues, issuesData, - policy as Policy, + policy, ); expect(filteredIssues).toEqual([ @@ -82,7 +82,7 @@ describe('filterIgnoredIssues fn', () => { }); }); - it('should not filter the expired ignored issues', () => { + it('should not filter the expired ignored issues', async () => { const issues: Issue[] = [ { pkgName: 'https://foo.bar|test1', @@ -115,31 +115,31 @@ 
describe('filterIgnoredIssues fn', () => { id: 'SNYK-TEST-2', }, }; - const policy = { + const policy = await snykPolicyLib.loadFromText(`{ ignore: { 'SNYK-TEST-1': [ { '*': { reason: 'None Given', - created: getCurrentDate(), - expires: getPastDate(), + created: ${getCurrentDate()}, + expires: ${getPastDate()}, }, }, ], }, - }; + }`); const [filteredIssues, filteredIssuesData] = filterIgnoredIssues( issues, issuesData, - policy as Policy, + policy, ); expect(filteredIssues).toEqual(issues); expect(filteredIssuesData).toEqual(issuesData); }); - it('should handle empty issue array', () => { + it('should handle empty issue array', async () => { const issues: Issue[] = []; const issuesData: IssuesData = { 'SNYK-TEST-1': { @@ -148,31 +148,31 @@ describe('filterIgnoredIssues fn', () => { id: 'SNYK-TEST-1', }, }; - const policy = { + const policy = await snykPolicyLib.loadFromText(`{ ignore: { 'SNYK-TEST-1': [ { '*': { reason: 'None Given', - created: getCurrentDate(), - expires: getFutureDate(), + created: ${getCurrentDate()}, + expires: ${getFutureDate()}, }, }, ], }, - }; + }`); const [filteredIssues, filteredIssuesData] = filterIgnoredIssues( issues, issuesData, - policy as Policy, + policy, ); expect(filteredIssues).toEqual([]); expect(filteredIssuesData).toEqual(issuesData); }); - it('should handle empty issues data object', () => { + it('should handle empty issues data object', async () => { const issues: Issue[] = [ { pkgName: 'https://foo.bar|test1', @@ -185,24 +185,24 @@ describe('filterIgnoredIssues fn', () => { }, ]; const issuesData: IssuesData = {}; - const policy = { + const policy = await snykPolicyLib.loadFromText(`{ ignore: { 'SNYK-TEST-1': [ { '*': { reason: 'None Given', - created: getCurrentDate(), - expires: getFutureDate(), + created: ${getCurrentDate()}, + expires: ${getFutureDate()}, }, }, ], }, - }; + }`); const [filteredIssues, filteredIssuesData] = filterIgnoredIssues( issues, issuesData, - policy as Policy, + policy, ); 
expect(filteredIssues).toEqual([]); @@ -240,7 +240,7 @@ describe('filterIgnoredIssues fn', () => { expect(filteredIssuesData).toEqual(issuesData); }); - it('should handle empty policy file', () => { + it('should handle empty policy file', async () => { const issues: Issue[] = [ { pkgName: 'https://foo.bar|test1', @@ -259,14 +259,14 @@ describe('filterIgnoredIssues fn', () => { id: 'SNYK-TEST-1', }, }; - const policy = { + const policy = await snykPolicyLib.loadFromText(`{ ignore: {}, - }; + }`); const [filteredIssues, filteredIssuesData] = filterIgnoredIssues( issues, issuesData, - policy as Policy, + policy, ); expect(filteredIssues).toEqual(issues); diff --git a/test/jest/unit/lib/ecosystems/resolve-monitor.facts.spec.ts b/test/jest/unit/lib/ecosystems/resolve-monitor.facts.spec.ts index a67bcc3dfb..237c09d1c1 100644 --- a/test/jest/unit/lib/ecosystems/resolve-monitor.facts.spec.ts +++ b/test/jest/unit/lib/ecosystems/resolve-monitor.facts.spec.ts @@ -267,12 +267,12 @@ describe('resolve and test facts', () => { path: 'random-fake-path', }); - await resolveAndMonitorFacts(scanResults, ({ + await resolveAndMonitorFacts(scanResults, { 'project-tags': 'tag1=value1,tag2=value2,tag3=value3', 'project-business-criticality': 'medium', 'project-environment': 'saas', 'project-lifecycle': 'development', - } as unknown) as Options); + } as unknown as Options); expect(httpClientSpy).toHaveBeenCalledTimes(1); expect(httpClientSpy).toBeCalledWith( expect.objectContaining({ diff --git a/test/jest/unit/lib/formatters/get-sarif-result.spec.ts b/test/jest/unit/lib/formatters/get-sarif-result.spec.ts index e2da748b26..c45c7464ef 100644 --- a/test/jest/unit/lib/formatters/get-sarif-result.spec.ts +++ b/test/jest/unit/lib/formatters/get-sarif-result.spec.ts @@ -2,11 +2,13 @@ import { getResults } from '../../../../../src/lib/formatters/get-sarif-result'; import { SEVERITY, TestResult } from '../../../../../src/lib/snyk-test/legacy'; describe('Retrieving sarif result', () => { - const 
cases: Array<[ - string, - { path: string; displayTargetFile?: string }, - { resultLocationUri: string }, - ]> = [ + const cases: Array< + [ + string, + { path: string; displayTargetFile?: string }, + { resultLocationUri: string }, + ] + > = [ [ 'should return the path given there is no target file present', { path: 'alpine' }, @@ -20,8 +22,7 @@ describe('Retrieving sarif result', () => { [ 'should return the path without colon characters given there is no target file present and the path contains a digest', { - path: - 'alpine@sha256:c0669ef34cdc14332c0f1ab0c2c01acb91d96014b172f1a76f3a39e63d1f0bda', + path: 'alpine@sha256:c0669ef34cdc14332c0f1ab0c2c01acb91d96014b172f1a76f3a39e63d1f0bda', }, { resultLocationUri: @@ -49,8 +50,7 @@ describe('Retrieving sarif result', () => { fixes: undefined, level: 'error', message: { - text: - 'This file introduces a vulnerable expat package with a critical severity vulnerability.', + text: 'This file introduces a vulnerable expat package with a critical severity vulnerability.', }, locations: [ { diff --git a/test/jest/unit/lib/formatters/iac-output/text/formatters.spec.ts b/test/jest/unit/lib/formatters/iac-output/text/formatters.spec.ts index 9b636c0add..049b17d2ac 100644 --- a/test/jest/unit/lib/formatters/iac-output/text/formatters.spec.ts +++ b/test/jest/unit/lib/formatters/iac-output/text/formatters.spec.ts @@ -80,22 +80,23 @@ describe('formatSnykIacTestTestData', () => { 'utf-8', ), ); - const snykIacTestOutputWithSuppressionsFixture: SnykIacTestOutput = JSON.parse( - fs.readFileSync( - path.join( - __dirname, - '..', - '..', - '..', - '..', - 'iac', - 'process-results', - 'fixtures', - 'snyk-iac-test-results-with-suppressions.json', + const snykIacTestOutputWithSuppressionsFixture: SnykIacTestOutput = + JSON.parse( + fs.readFileSync( + path.join( + __dirname, + '..', + '..', + '..', + '..', + 'iac', + 'process-results', + 'fixtures', + 'snyk-iac-test-results-with-suppressions.json', + ), + 'utf-8', ), - 'utf-8', - ), - ); 
+ ); const testDataFixture: IacTestData = JSON.parse( fs.readFileSync( diff --git a/test/jest/unit/lib/formatters/iac-output/text/test-summary.spec.ts b/test/jest/unit/lib/formatters/iac-output/text/test-summary.spec.ts index fab20b555d..86b0bb2396 100644 --- a/test/jest/unit/lib/formatters/iac-output/text/test-summary.spec.ts +++ b/test/jest/unit/lib/formatters/iac-output/text/test-summary.spec.ts @@ -82,10 +82,10 @@ describe('formatIacTestSummary', () => { ${colors.failure.bold('✗')} Files with issues: ${colors.info.bold('3')} Ignored issues: ${colors.info.bold('3')} Total issues: ${colors.info.bold('22')} [ ${colors.severities.critical( - '0 critical', - )}, ${colors.severities.high('5 high')}, ${colors.severities.medium( - '4 medium', - )}, ${colors.severities.low('13 low')} ]`, + '0 critical', + )}, ${colors.severities.high('5 high')}, ${colors.severities.medium( + '4 medium', + )}, ${colors.severities.low('13 low')} ]`, ); expect(result).not.toContain('suppressed issues'); @@ -106,10 +106,10 @@ ${colors.failure.bold('✗')} Files with issues: ${colors.info.bold('3')} Ignored issues: ${colors.info.bold('3')} Cloud context - suppressed issues: ${colors.info.bold('42')} Total issues: ${colors.info.bold('22')} [ ${colors.severities.critical( - '0 critical', - )}, ${colors.severities.high('5 high')}, ${colors.severities.medium( - '4 medium', - )}, ${colors.severities.low('13 low')} ]`, + '0 critical', + )}, ${colors.severities.high('5 high')}, ${colors.severities.medium( + '4 medium', + )}, ${colors.severities.low('13 low')} ]`, ); }); }); diff --git a/test/jest/unit/lib/formatters/open-source-sarif-output.spec.ts b/test/jest/unit/lib/formatters/open-source-sarif-output.spec.ts index e7c5fefa87..bb7c2e42df 100644 --- a/test/jest/unit/lib/formatters/open-source-sarif-output.spec.ts +++ b/test/jest/unit/lib/formatters/open-source-sarif-output.spec.ts @@ -32,10 +32,11 @@ describe('createSarifOutputForOpenSource', () => { displayTargetFile: `${time}/${lockFileName}`, 
}); const sarif = createSarifOutputForOpenSource([testFile]); - const uri = sarif.runs?.[0]?.results?.[0].locations?.[0]?.physicalLocation?.artifactLocation?.uri?.replace( - `${time}/`, - '', - ); + const uri = + sarif.runs?.[0]?.results?.[0].locations?.[0]?.physicalLocation?.artifactLocation?.uri?.replace( + `${time}/`, + '', + ); expect(uri).toMatchSnapshot(); }), ); diff --git a/test/jest/unit/lib/iac/drift/drift.spec.ts b/test/jest/unit/lib/iac/drift/drift.spec.ts index b37b9f3734..9781e8ebe0 100644 --- a/test/jest/unit/lib/iac/drift/drift.spec.ts +++ b/test/jest/unit/lib/iac/drift/drift.spec.ts @@ -17,7 +17,7 @@ import { import { addIacDriftAnalytics } from '../../../../../../src/cli/commands/test/iac/local-execution/analytics'; import * as analytics from '../../../../../../src/lib/analytics'; import * as snykPolicy from 'snyk-policy'; -import { Policy } from '../../../../../../src/lib/policy/find-and-load-policy'; +import { Policy } from 'snyk-policy'; import { DCTL_EXIT_CODES, driftctlVersion, @@ -296,6 +296,8 @@ describe('updateExcludeInPolicy', () => { 'policy-no-excludes.yml', {}, { + code: [], + global: [], 'iac-drift': [ 'aws_iam_user.test-driftctl2', 'aws_iam_access_key.AKIA5QYBVVD2Y6PBAAPY', @@ -358,6 +360,8 @@ describe('updateExcludeInPolicy', () => { 'exclude-missing': true, }, { + code: [], + global: [], 'iac-drift': [ 'aws_s3_bucket_policy.driftctl', 'aws_s3_bucket_notification.driftctl', @@ -371,6 +375,8 @@ describe('updateExcludeInPolicy', () => { 'exclude-unmanaged': true, }, { + code: [], + global: [], 'iac-drift': [ 'aws_iam_user.test-driftctl2', 'aws_iam_access_key.AKIA5QYBVVD2Y6PBAAPY', diff --git a/test/jest/unit/lib/iac/test/v2/sarif.spec.ts b/test/jest/unit/lib/iac/test/v2/sarif.spec.ts index ec201a2be0..5b80513428 100644 --- a/test/jest/unit/lib/iac/test/v2/sarif.spec.ts +++ b/test/jest/unit/lib/iac/test/v2/sarif.spec.ts @@ -40,9 +40,8 @@ describe('convertEngineToSarifResults', () => { integratedSarifOutputFixtureContent, ); - 
integratedSarifOutputFixture.runs[0].originalUriBaseIds!.PROJECTROOT.uri = pathToFileURL( - process.cwd() + '/', - ).href; + integratedSarifOutputFixture.runs[0].originalUriBaseIds!.PROJECTROOT.uri = + pathToFileURL(process.cwd() + '/').href; it('returns expected SARIF result', () => { const result = convertEngineToSarifResults(snykIacTestFixture); diff --git a/test/jest/unit/lib/plugins/yarn-workspaces-parser.spec.ts b/test/jest/unit/lib/plugins/yarn-workspaces-parser.spec.ts index a5ddc944c4..69809837e0 100644 --- a/test/jest/unit/lib/plugins/yarn-workspaces-parser.spec.ts +++ b/test/jest/unit/lib/plugins/yarn-workspaces-parser.spec.ts @@ -10,9 +10,10 @@ const yarnWorkspacesMap = { }; const yarnWorkspacesMapWindows = { - 'C:\\snyk\\test\\acceptance\\workspaces\\yarn-workspace-out-of-sync\\package.json': { - workspaces: ['packages'], - }, + 'C:\\snyk\\test\\acceptance\\workspaces\\yarn-workspace-out-of-sync\\package.json': + { + workspaces: ['packages'], + }, 'C:\\snyk\\test\\acceptance\\workspaces\\yarn-workspace\\package.json': { workspaces: ['libs/*/**', 'tools/*'], }, diff --git a/test/jest/unit/lib/unexpected-error.spec.ts b/test/jest/unit/lib/unexpected-error.spec.ts index 5d49f89fef..7fc976be82 100644 --- a/test/jest/unit/lib/unexpected-error.spec.ts +++ b/test/jest/unit/lib/unexpected-error.spec.ts @@ -1,5 +1,5 @@ import * as path from 'path'; -import { runCommand } from '../../util/runCommand'; +import { RunCommandOptions, runCommand } from '../../util/runCommand'; import { getFixturePath } from '../../util/getFixturePath'; /** @@ -15,7 +15,13 @@ import { getFixturePath } from '../../util/getFixturePath'; describe('callHandlingUnexpectedErrors', () => { async function runScript(filename: string) { const file = path.resolve(getFixturePath('unexpected-error'), filename); - return runCommand('node', ['-r', 'ts-node/register', file]); + const options: RunCommandOptions = { + env: { + FORCE_COLOR: '0', + PATH: process.env.PATH, + }, + }; + return 
runCommand('node', ['-r', 'ts-node/register', file], options); } it('calls the provided callable', async () => { diff --git a/test/jest/unit/metrics.spec.ts b/test/jest/unit/metrics.spec.ts index 43729bbfb5..7e3a98c955 100644 --- a/test/jest/unit/metrics.spec.ts +++ b/test/jest/unit/metrics.spec.ts @@ -1,8 +1,10 @@ const debugMock: string[][] = []; jest.mock('debug', () => { - const factory = (key) => (...args) => { - debugMock.push([key, ...args]); - }; + const factory = + (key) => + (...args) => { + debugMock.push([key, ...args]); + }; factory.default = factory; return factory; }); diff --git a/test/jest/unit/policy-display.spec.ts b/test/jest/unit/policy-display.spec.ts index f47995b388..4e5406a710 100644 --- a/test/jest/unit/policy-display.spec.ts +++ b/test/jest/unit/policy-display.spec.ts @@ -19,11 +19,7 @@ it('test sensibly bails if gets an old .snyk format', async () => { .slice(3) .join('\n'); - const expected = expectedFileString - .trim() - .split('\n') - .slice(3) - .join('\n'); + const expected = expectedFileString.trim().split('\n').slice(3).join('\n'); expect(result).toEqual(expected); }); diff --git a/test/jest/unit/snyk-code/snyk-code-test.spec.ts b/test/jest/unit/snyk-code/snyk-code-test.spec.ts index 445d1a9db5..571b447e9c 100644 --- a/test/jest/unit/snyk-code/snyk-code-test.spec.ts +++ b/test/jest/unit/snyk-code/snyk-code-test.spec.ts @@ -313,24 +313,21 @@ describe('Test snyk code', () => { it.each([ { - name: - 'should write only sarif result to file when only `--sarif-file-output` is used', + name: 'should write only sarif result to file when only `--sarif-file-output` is used', options: { 'sarif-file-output': true, 'json-file-output': false, }, }, { - name: - 'should write only json result to file when only `--json-file-output` is used', + name: 'should write only json result to file when only `--json-file-output` is used', options: { 'sarif-file-output': false, 'json-file-output': true, }, }, { - name: - 'should write sarif and json results 
to file when `--sarif-file-output` and `--json-file-output` are used', + name: 'should write sarif and json results to file when `--sarif-file-output` and `--json-file-output` are used', options: { 'sarif-file-output': true, 'json-file-output': true, @@ -560,9 +557,8 @@ describe('Test snyk code', () => { await ecosystems.testEcosystem('code', ['some/path'], options); } catch (error) { const errMessage = error.message.trim(); - const expectedOutput = jsonStringifyLargeObject( - sampleSarifResponse, - ).trim(); + const expectedOutput = + jsonStringifyLargeObject(sampleSarifResponse).trim(); // exit code 1 expect(error.code).toBe('VULNS'); @@ -595,9 +591,8 @@ describe('Test snyk code', () => { await snykTest('some/path', options); } catch (error) { const errMessage = error.message.trim(); - const expectedOutput = jsonStringifyLargeObject( - sampleSarifResponse, - ).trim(); + const expectedOutput = + jsonStringifyLargeObject(sampleSarifResponse).trim(); // exit code 1 expect(error.code).toBe('VULNS'); diff --git a/test/setup.js b/test/setup.js index c9f3123d27..961d37f08e 100644 --- a/test/setup.js +++ b/test/setup.js @@ -8,7 +8,7 @@ const { } = require('./jest/util/fipsTestHelper'); const { runSnykCLI } = require('./jest/util/runSnykCLI'); -module.exports = async function() { +module.exports = async function () { if (process.env.TEST_SNYK_COMMAND) { process.env.TEST_SNYK_COMMAND = getCliBinaryPath(); } diff --git a/test/smoke/alpine/entrypoint.sh b/test/smoke/alpine/entrypoint.sh index 64e1d78543..634d916612 100755 --- a/test/smoke/alpine/entrypoint.sh +++ b/test/smoke/alpine/entrypoint.sh @@ -1,6 +1,6 @@ #!/bin/sh -curl -Lo ./snyk-cli 'https://static.snyk.io/cli/latest/snyk-alpine' +curl -Lo ./snyk-cli 'https://downloads.snyk.io/cli/latest/snyk-alpine' chmod -R +x ./snyk-cli mv ./snyk-cli /usr/local/bin/snyk snyk --version diff --git a/test/smoke/install-snyk-binary-win.sh b/test/smoke/install-snyk-binary-win.sh index 8ea1d24e72..6e51370028 100644 --- 
a/test/smoke/install-snyk-binary-win.sh +++ b/test/smoke/install-snyk-binary-win.sh @@ -1,4 +1,4 @@ -curl -Lo ./snyk-cli.exe 'https://static.snyk.io/cli/latest/snyk-win.exe' +curl -Lo ./snyk-cli.exe 'https://downloads.snyk.io/cli/latest/snyk-win.exe' ./snyk-cli.exe --version chmod -R +x ./snyk-cli mv ./snyk-cli.exe "/bin/snyk.exe" diff --git a/test/tap/cli-monitor.acceptance.test.ts b/test/tap/cli-monitor.acceptance.test.ts index 48d36bb5e3..4fb8e9b0d0 100644 --- a/test/tap/cli-monitor.acceptance.test.ts +++ b/test/tap/cli-monitor.acceptance.test.ts @@ -263,9 +263,9 @@ if (!isWindows) { async inspect() { return { plugin: { name: 'sbt' }, - package: require(getWorkspacePath( - 'sbt-simple-struts/monitor-graph-result.json', - )), + package: require( + getWorkspacePath('sbt-simple-struts/monitor-graph-result.json'), + ), }; }, }; @@ -1475,6 +1475,62 @@ if (!isWindows) { ); }); + test('`monitor cocoapods-app with just Podfile.lock`', async (t) => { + chdirWorkspaces('cocoapods-app'); + const plugin = { + async inspect() { + return { + plugin: { + targetFile: 'Podfile.lock', + name: 'snyk-cocoapods-plugin', + runtime: 'cocoapods', + }, + package: {}, + }; + }, + }; + console.log(plugin); + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('cocoapods').returns(plugin); + + await cli.monitor('./', { + file: 'Podfile.lock', + }); + const req = server.popRequest(); + t.equal(req.method, 'PUT', 'makes PUT request'); + t.equal( + req.headers['x-snyk-cli-version'], + versionNumber, + 'sends version number', + ); + const depGraphJSON = req.body.depGraphJSON; + t.ok(depGraphJSON); + t.match(req.url, '/monitor/cocoapods/graph', 'puts at correct url'); + t.equal( + req.body.targetFile, + 'Podfile.lock', + 'sends the targetFile (Podfile.lock)', + ); + t.same( + spyPlugin.getCall(0).args, + [ + './', + 'Podfile.lock', + { + args: null, + file: 'Podfile.lock', + 
packageManager: 'cocoapods', + path: './', + }, + snykHttpClient, + ], + 'calls CocoaPods plugin', + ); + }); + test('`monitor large-mono-repo --file=bundler-app/Gemfile` suggest to use --all-projects', async (t) => { chdirWorkspaces('large-mono-repo'); const res = await cli.monitor({ file: 'bundler-app/Gemfile' }); diff --git a/test/tap/cli-monitor/cli-monitor.all-projects.spec.ts b/test/tap/cli-monitor/cli-monitor.all-projects.spec.ts index 841733b8d6..6641a4c694 100644 --- a/test/tap/cli-monitor/cli-monitor.all-projects.spec.ts +++ b/test/tap/cli-monitor/cli-monitor.all-projects.spec.ts @@ -1,6 +1,7 @@ import * as sinon from 'sinon'; import * as path from 'path'; import * as depGraphLib from '@snyk/dep-graph'; +import { getWorkspacePath } from '../../jest/util/getWorkspacePath'; interface AcceptanceTests { language: string; @@ -12,687 +13,701 @@ interface AcceptanceTests { export const AllProjectsTests: AcceptanceTests = { language: 'Mixed', tests: { - '`monitor mono-repo-with-ignores --all-projects` respects .snyk policy': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.monitor('mono-repo-with-ignores', { - allProjects: true, - detectionDepth: 2, - }); - const requests = params.server - .getRequests() - .filter((req) => req.url.includes('/monitor/')); - let policyCount = 0; - requests.forEach((req) => { - const vulnerableFolderPath = - process.platform === 'win32' - ? 
'vulnerable\\package-lock.json' - : 'vulnerable/package-lock.json'; - - if (req.body.targetFileRelativePath.endsWith(vulnerableFolderPath)) { - t.match( - req.body.policy, - 'npm:node-uuid:20160328', - 'body contains policy', - ); - policyCount += 1; - } - }); - t.equal(policyCount, 1, 'one policy found'); - }, - '`monitor monorepo-bad-project --all-projects`': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - const spyPlugin = sinon.spy(params.plugins, 'loadPlugin'); - t.teardown(spyPlugin.restore); - let result; - try { - await params.cli.monitor('monorepo-bad-project', { + '`monitor mono-repo-with-ignores --all-projects` respects .snyk policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.monitor('mono-repo-with-ignores', { allProjects: true, + detectionDepth: 2, }); - } catch (error) { - result = error.message; - } - t.ok(spyPlugin.withArgs('rubygems').calledOnce, 'calls rubygems plugin'); - t.ok(spyPlugin.withArgs('yarn').calledOnce, 'calls npm plugin'); - t.ok(spyPlugin.withArgs('maven').notCalled, 'did not call maven plugin'); - - t.match( - result, - 'rubygems/graph/some/project-id', - 'rubygems project was monitored', - ); - t.match( - result, - 'Dependency snyk@* was not found in yarn.lock', - 'yarn project had an error and we displayed it', - ); + const requests = params.server + .getRequests() + .filter((req) => req.url.includes('/monitor/')); + let policyCount = 0; + requests.forEach((req) => { + const vulnerableFolderPath = + process.platform === 'win32' + ? 
'vulnerable\\package-lock.json' + : 'vulnerable/package-lock.json'; - const request = params.server.popRequest(); + if (req.body.targetFileRelativePath.endsWith(vulnerableFolderPath)) { + t.match( + req.body.policy, + 'npm:node-uuid:20160328', + 'body contains policy', + ); + policyCount += 1; + } + }); + t.equal(policyCount, 1, 'one policy found'); + }, + '`monitor monorepo-bad-project --all-projects`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const spyPlugin = sinon.spy(params.plugins, 'loadPlugin'); + t.teardown(spyPlugin.restore); + let result; + try { + await params.cli.monitor('monorepo-bad-project', { + allProjects: true, + }); + } catch (error) { + result = error.message; + } + t.ok( + spyPlugin.withArgs('rubygems').calledOnce, + 'calls rubygems plugin', + ); + t.ok(spyPlugin.withArgs('yarn').calledOnce, 'calls npm plugin'); + t.ok( + spyPlugin.withArgs('maven').notCalled, + 'did not call maven plugin', + ); - t.match( - request.url, - '/api/v1/monitor/rubygems/graph', - 'puts at correct url', - ); - t.notOk(request.body.targetFile, "doesn't send the targetFile"); - t.equal(request.method, 'PUT', 'makes PUT request'); - t.equal( - request.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - }, - '`monitor monorepo-with-nuget --all-projects sends same payload as --file`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); + t.match( + result, + 'rubygems/graph/some/project-id', + 'rubygems project was monitored', + ); + t.match( + result, + 'Dependency snyk@* was not found in yarn.lock', + 'yarn project had an error and we displayed it', + ); - // mock go plugin becuase CI tooling doesn't have go installed - const mockPlugin = { - async inspect() { - return { - plugin: { - targetFile: 'Gopkg.lock', - name: 'snyk-go-plugin', - runtime: 'go', - }, - package: {}, - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - 
loadPlugin.withArgs('golangdep').returns(mockPlugin); - loadPlugin.callThrough(); // don't mock other plugins + const request = params.server.popRequest(); - await params.cli.monitor('monorepo-with-nuget', { - allProjects: true, - detectionDepth: 4, - }); + t.match( + request.url, + '/api/v1/monitor/rubygems/graph', + 'puts at correct url', + ); + t.notOk(request.body.targetFile, "doesn't send the targetFile"); + t.equal(request.method, 'PUT', 'makes PUT request'); + t.equal( + request.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + }, + '`monitor monorepo-with-nuget --all-projects sends same payload as --file`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); - const [ - projectAssetsAll, - cocoapodsAll, - golangdepAll, - npmAll, - packageConfigAll, - paketAll, - ] = params.server - .getRequests() - .filter((req) => req.url.includes('/monitor/')); + // mock go plugin becuase CI tooling doesn't have go installed + const mockPlugin = { + async inspect() { + return { + plugin: { + targetFile: 'Gopkg.lock', + name: 'snyk-go-plugin', + runtime: 'go', + }, + package: {}, + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('golangdep').returns(mockPlugin); + loadPlugin.callThrough(); // don't mock other plugins - params.server.restore(); - await params.cli.monitor('monorepo-with-nuget', { - file: `src${path.sep}cartservice-nuget${path.sep}obj${path.sep}project.assets.json`, - }); - const projectAssetsFile = params.server.popRequest(); + await params.cli.monitor('monorepo-with-nuget', { + allProjects: true, + detectionDepth: 4, + }); - params.server.restore(); - await params.cli.monitor('monorepo-with-nuget', { - file: `src${path.sep}cocoapods-app${path.sep}Podfile.lock`, - }); - const cocoapodsFile = params.server.popRequest(); + const [ + projectAssetsAll, + cocoapodsAll, + golangdepAll, + npmAll, + packageConfigAll, + paketAll, + ] = 
params.server + .getRequests() + .filter((req) => req.url.includes('/monitor/')); - params.server.restore(); - await params.cli.monitor('monorepo-with-nuget', { - file: `src${path.sep}frontend${path.sep}Gopkg.lock`, - }); - const golangdepFile = params.server.popRequest(); + params.server.restore(); + await params.cli.monitor('monorepo-with-nuget', { + file: `src${path.sep}cartservice-nuget${path.sep}obj${path.sep}project.assets.json`, + }); + const projectAssetsFile = params.server.popRequest(); - params.server.restore(); - await params.cli.monitor('monorepo-with-nuget', { - file: `src${path.sep}paymentservice${path.sep}package-lock.json`, - }); - const npmFile = params.server.popRequest(); + params.server.restore(); + await params.cli.monitor('monorepo-with-nuget', { + file: `src${path.sep}cocoapods-app${path.sep}Podfile.lock`, + }); + const cocoapodsFile = params.server.popRequest(); - params.server.restore(); - await params.cli.monitor('monorepo-with-nuget', { - file: `test${path.sep}nuget-app-4${path.sep}packages.config`, - }); - const packageConfigFile = params.server.popRequest(); + params.server.restore(); + await params.cli.monitor('monorepo-with-nuget', { + file: `src${path.sep}frontend${path.sep}Gopkg.lock`, + }); + const golangdepFile = params.server.popRequest(); - params.server.restore(); - await params.cli.monitor('monorepo-with-nuget', { - file: `test${path.sep}paket-app${path.sep}paket.dependencies`, - }); - const paketFile = params.server.popRequest(); + params.server.restore(); + await params.cli.monitor('monorepo-with-nuget', { + file: `src${path.sep}paymentservice${path.sep}package-lock.json`, + }); + const npmFile = params.server.popRequest(); - t.same( - projectAssetsAll.body, - projectAssetsFile.body, - `same body for --all-projects and --file=src${path.sep}cartservice-nuget${path.sep}obj${path.sep}project.assets.json`, - ); - t.same( - cocoapodsAll.body, - cocoapodsFile.body, - `same body for --all-projects and 
--file=src${path.sep}cocoapods-app${path.sep}Podfile.lock`, - ); - t.same( - golangdepAll.body, - golangdepFile.body, - `same body for --all-projects and --file=src${path.sep}frontend${path.sep}Gopkg.lock`, - ); - t.same( - npmAll.body, - npmFile.body, - `same body for --all-projects and --file=src${path.sep}paymentservice${path.sep}package-lock.json`, - ); - t.same( - packageConfigAll.body, - packageConfigFile.body, - `same body for --all-projects and --file=test${path.sep}nuget-app-4${path.sep}packages.config`, - ); - t.same( - paketAll.body, - paketFile.body, - `same body for --all-projects and --file=test${path.sep}paket-app${path.sep}paket.dependencies`, - ); - }, - '`monitor mono-repo-go/hello-dep --all-projects sends same body as --file`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - // mock plugin becuase CI tooling doesn't have go installed - const mockPlugin = { - async inspect() { - return { - plugin: { - targetFile: 'Gopkg.lock', - name: 'snyk-go-plugin', - runtime: 'go', - }, - package: {}, - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('golangdep').returns(mockPlugin); - await params.cli.monitor('mono-repo-go/hello-dep', { - allProjects: true, - }); - const allProjectsBody = params.server.popRequest(); - await params.cli.monitor('mono-repo-go/hello-dep', { - file: 'Gopkg.lock', - }); - const fileBody = params.server.popRequest(); - t.same( - allProjectsBody.body, - fileBody.body, - 'same body for --all-projects and --file=mono-repo-go/hello-dep/Gopkg.lock', - ); - }, - '`monitor mono-repo-go/hello-mod --all-projects sends same body as --file`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - // mock plugin becuase CI tooling doesn't have go installed - const mockPlugin = { - async inspect() { - return { - plugin: { - targetFile: 'go.mod', - name: 'snyk-go-plugin', - runtime: 'go', - }, - package: {}, - }; - }, - }; 
- const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gomodules').returns(mockPlugin); - await params.cli.monitor('mono-repo-go/hello-mod', { - allProjects: true, - }); - const allProjectsBody = params.server.popRequest(); - await params.cli.monitor('mono-repo-go/hello-mod', { - file: 'go.mod', - }); - const fileBody = params.server.popRequest(); - t.same( - allProjectsBody.body, - fileBody.body, - 'same body for --all-projects and --file=mono-repo-go/hello-mod/go.mod', - ); - }, - '`monitor mono-repo-go/hello-vendor --all-projects sends same body as --file`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - // mock plugin becuase CI tooling doesn't have go installed - const mockPlugin = { - async inspect() { - return { - plugin: { - targetFile: 'vendor/vendor.json', - name: 'snyk-go-plugin', - runtime: 'go', - }, - package: {}, - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('govendor').returns(mockPlugin); - await params.cli.monitor('mono-repo-go/hello-vendor', { - allProjects: true, - }); - const allProjectsBody = params.server.popRequest(); - await params.cli.monitor('mono-repo-go/hello-vendor', { - file: 'vendor/vendor.json', - }); - const fileBody = params.server.popRequest(); - t.same( - allProjectsBody.body, - fileBody.body, - 'same body for --all-projects and --file=mono-repo-go/hello-vendor/vendor/vendor.json', - ); - }, - '`monitor mono-repo-go with --all-projects and --detection-depth=3`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - // mock plugin becuase CI tooling doesn't have go installed - const mockPlugin = { - async inspect() { - return { - plugin: { - name: 'mock', - }, - package: {}, - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - 
loadPlugin.withArgs('golangdep').returns(mockPlugin); - loadPlugin.withArgs('gomodules').returns(mockPlugin); - loadPlugin.withArgs('govendor').returns(mockPlugin); - loadPlugin.callThrough(); // don't mock npm plugin - const result = await params.cli.monitor('mono-repo-go', { - allProjects: true, - detectionDepth: 3, - }); - t.match(result, 'golangdep/some/project-id', 'dep project was monitored'); - t.match(result, 'gomodules/some/project-id', 'mod project was monitored'); - t.match(result, 'npm/graph/some/project-id', 'npm project was monitored'); - t.match( - result, - 'govendor/some/project-id', - 'vendor project was monitored', - ); + params.server.restore(); + await params.cli.monitor('monorepo-with-nuget', { + file: `test${path.sep}nuget-app-4${path.sep}packages.config`, + }); + const packageConfigFile = params.server.popRequest(); - const requests = params.server - .getRequests() - .filter((req) => req.url.includes('/monitor/')); - t.equal(requests.length, 4, 'correct amount of monitor requests'); + params.server.restore(); + await params.cli.monitor('monorepo-with-nuget', { + file: `test${path.sep}paket-app${path.sep}paket.dependencies`, + }); + const paketFile = params.server.popRequest(); - requests.forEach((req) => { + t.same( + projectAssetsAll.body, + projectAssetsFile.body, + `same body for --all-projects and --file=src${path.sep}cartservice-nuget${path.sep}obj${path.sep}project.assets.json`, + ); + t.same( + cocoapodsAll.body, + cocoapodsFile.body, + `same body for --all-projects and --file=src${path.sep}cocoapods-app${path.sep}Podfile.lock`, + ); + t.same( + golangdepAll.body, + golangdepFile.body, + `same body for --all-projects and --file=src${path.sep}frontend${path.sep}Gopkg.lock`, + ); + t.same( + npmAll.body, + npmFile.body, + `same body for --all-projects and --file=src${path.sep}paymentservice${path.sep}package-lock.json`, + ); + t.same( + packageConfigAll.body, + packageConfigFile.body, + `same body for --all-projects and 
--file=test${path.sep}nuget-app-4${path.sep}packages.config`, + ); + t.same( + paketAll.body, + paketFile.body, + `same body for --all-projects and --file=test${path.sep}paket-app${path.sep}paket.dependencies`, + ); + }, + '`monitor mono-repo-go/hello-dep --all-projects sends same body as --file`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + // mock plugin becuase CI tooling doesn't have go installed + const mockPlugin = { + async inspect() { + return { + plugin: { + targetFile: 'Gopkg.lock', + name: 'snyk-go-plugin', + runtime: 'go', + }, + package: {}, + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('golangdep').returns(mockPlugin); + await params.cli.monitor('mono-repo-go/hello-dep', { + allProjects: true, + }); + const allProjectsBody = params.server.popRequest(); + await params.cli.monitor('mono-repo-go/hello-dep', { + file: 'Gopkg.lock', + }); + const fileBody = params.server.popRequest(); + t.same( + allProjectsBody.body, + fileBody.body, + 'same body for --all-projects and --file=mono-repo-go/hello-dep/Gopkg.lock', + ); + }, + '`monitor mono-repo-go/hello-mod --all-projects sends same body as --file`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + // mock plugin becuase CI tooling doesn't have go installed + const mockPlugin = { + async inspect() { + return { + plugin: { + targetFile: 'go.mod', + name: 'snyk-go-plugin', + runtime: 'go', + }, + package: {}, + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gomodules').returns(mockPlugin); + await params.cli.monitor('mono-repo-go/hello-mod', { + allProjects: true, + }); + const allProjectsBody = params.server.popRequest(); + await params.cli.monitor('mono-repo-go/hello-mod', { + file: 'go.mod', + }); + const fileBody = params.server.popRequest(); + t.same( + allProjectsBody.body, + fileBody.body, + 
'same body for --all-projects and --file=mono-repo-go/hello-mod/go.mod', + ); + }, + '`monitor mono-repo-go/hello-vendor --all-projects sends same body as --file`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + // mock plugin becuase CI tooling doesn't have go installed + const mockPlugin = { + async inspect() { + return { + plugin: { + targetFile: 'vendor/vendor.json', + name: 'snyk-go-plugin', + runtime: 'go', + }, + package: {}, + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('govendor').returns(mockPlugin); + await params.cli.monitor('mono-repo-go/hello-vendor', { + allProjects: true, + }); + const allProjectsBody = params.server.popRequest(); + await params.cli.monitor('mono-repo-go/hello-vendor', { + file: 'vendor/vendor.json', + }); + const fileBody = params.server.popRequest(); + t.same( + allProjectsBody.body, + fileBody.body, + 'same body for --all-projects and --file=mono-repo-go/hello-vendor/vendor/vendor.json', + ); + }, + '`monitor mono-repo-go with --all-projects and --detection-depth=3`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + // mock plugin becuase CI tooling doesn't have go installed + const mockPlugin = { + async inspect() { + return { + plugin: { + name: 'mock', + }, + package: {}, + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('golangdep').returns(mockPlugin); + loadPlugin.withArgs('gomodules').returns(mockPlugin); + loadPlugin.withArgs('govendor').returns(mockPlugin); + loadPlugin.callThrough(); // don't mock npm plugin + const result = await params.cli.monitor('mono-repo-go', { + allProjects: true, + detectionDepth: 3, + }); t.match( - req.url, - /\/api\/v1\/monitor\/(npm\/graph|golangdep|gomodules|govendor)/, - 'puts at correct url', + result, + 'golangdep/some/project-id', + 'dep project was monitored', ); - 
t.notOk(req.body.targetFile, "doesn't send the targetFile"); - t.equal(req.method, 'PUT', 'makes PUT request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + t.match( + result, + 'gomodules/some/project-id', + 'mod project was monitored', ); - }); - }, - '`monitor gradle-monorepo with --all-projects`': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - const simpleGradleGraph = depGraphLib.createFromJSON({ - schemaVersion: '1.2.0', - pkgManager: { - name: 'gradle', - }, - pkgs: [ - { - id: 'gradle-monorepo@0.0.0', - info: { - name: 'gradle-monorepo', - version: '0.0.0', - }, + t.match( + result, + 'npm/graph/some/project-id', + 'npm project was monitored', + ); + t.match( + result, + 'govendor/some/project-id', + 'vendor project was monitored', + ); + + const requests = params.server + .getRequests() + .filter((req) => req.url.includes('/monitor/')); + t.equal(requests.length, 4, 'correct amount of monitor requests'); + + requests.forEach((req) => { + t.match( + req.url, + /\/api\/v1\/monitor\/(npm\/graph|golangdep|gomodules|govendor)/, + 'puts at correct url', + ); + t.notOk(req.body.targetFile, "doesn't send the targetFile"); + t.equal(req.method, 'PUT', 'makes PUT request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + }); + }, + '`monitor gradle-monorepo with --all-projects`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const simpleGradleGraph = depGraphLib.createFromJSON({ + schemaVersion: '1.2.0', + pkgManager: { + name: 'gradle', }, - ], - graph: { - rootNodeId: 'root-node', - nodes: [ + pkgs: [ { - nodeId: 'root-node', - pkgId: 'gradle-monorepo@0.0.0', - deps: [], + id: 'gradle-monorepo@0.0.0', + info: { + name: 'gradle-monorepo', + version: '0.0.0', + }, }, ], - }, - }); - const plugin = { - async inspect() { - return { - plugin: { - name: 'bundled:gradle', - runtime: 'unknown', - meta: {}, - }, - 
scannedProjects: [ + graph: { + rootNodeId: 'root-node', + nodes: [ { - meta: { - gradleProjectName: 'root-proj', - versionBuildInfo: { - gradleVersion: '6.5', + nodeId: 'root-node', + pkgId: 'gradle-monorepo@0.0.0', + deps: [], + }, + ], + }, + }); + const plugin = { + async inspect() { + return { + plugin: { + name: 'bundled:gradle', + runtime: 'unknown', + meta: {}, + }, + scannedProjects: [ + { + meta: { + gradleProjectName: 'root-proj', + versionBuildInfo: { + gradleVersion: '6.5', + }, }, + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - { - meta: { - gradleProjectName: 'root-proj/subproj', - versionBuildInfo: { - gradleVersion: '6.5', + { + meta: { + gradleProjectName: 'root-proj/subproj', + versionBuildInfo: { + gradleVersion: '6.5', + }, }, + depGraph: simpleGradleGraph, + targetFile: + getWorkspacePath('gradle-monorepo') + + '/subproj/build.gradle', }, - depGraph: simpleGradleGraph, - }, - ], - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); - loadPlugin.callThrough(); - const result = await params.cli.monitor('gradle-monorepo', { - allProjects: true, - detectionDepth: 3, - d: true, - }); - t.match( - result, - 'gradle/graph/some/project-id', - 'gradle project was monitored', - ); - t.match( - result, - 'npm/graph/some/project-id', - 'gradle project was monitored', - ); - - const requests = params.server - .getRequests() - .filter((req) => req.url.includes('/monitor/')); - t.equal(requests.length, 3, 'correct amount of monitor requests'); - requests.forEach((req) => { + ], + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); + loadPlugin.callThrough(); + const result = await params.cli.monitor('gradle-monorepo', { + allProjects: true, + detectionDepth: 3, + d: true, + }); t.match( - req.url, - 
/\/api\/v1\/monitor\/(npm\/graph|gradle\/graph)/, - 'puts at correct url', + result, + 'gradle/graph/some/project-id', + 'gradle project was monitored', ); - t.notOk(req.body.targetFile, "doesn't send the targetFile"); - t.equal(req.method, 'PUT', 'makes PUT request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + t.match( + result, + 'npm/graph/some/project-id', + 'gradle project was monitored', ); - }); - }, - '`monitor kotlin-monorepo --all-projects` scans kotlin files': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const simpleGradleGraph = depGraphLib.createFromJSON({ - schemaVersion: '1.2.0', - pkgManager: { - name: 'gradle', - }, - pkgs: [ - { - id: 'gradle-monorepo@0.0.0', - info: { - name: 'gradle-monorepo', - version: '0.0.0', - }, + + let policyCount = 0; + const requests = params.server + .getRequests() + .filter((req) => req.url.includes('/monitor/')); + t.equal(requests.length, 3, 'correct amount of monitor requests'); + requests.forEach((req) => { + t.match( + req.url, + /\/api\/v1\/monitor\/(npm\/graph|gradle\/graph)/, + 'puts at correct url', + ); + + if (req.body.policy) { + policyCount++; + } + t.notOk(req.body.targetFile, "doesn't send the targetFile"); + t.equal(req.method, 'PUT', 'makes PUT request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + }); + t.equal(policyCount, 1, '1 nested policy found in monorepo'); + }, + '`monitor kotlin-monorepo --all-projects` scans kotlin files': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const simpleGradleGraph = depGraphLib.createFromJSON({ + schemaVersion: '1.2.0', + pkgManager: { + name: 'gradle', }, - ], - graph: { - rootNodeId: 'root-node', - nodes: [ + pkgs: [ { - nodeId: 'root-node', - pkgId: 'gradle-monorepo@0.0.0', - deps: [], + id: 'gradle-monorepo@0.0.0', + info: { + name: 'gradle-monorepo', + version: '0.0.0', + }, }, ], - }, - }); - 
const plugin = { - async inspect() { - return { - plugin: { - name: 'bundled:gradle', - runtime: 'unknown', - meta: {}, - }, - scannedProjects: [ + graph: { + rootNodeId: 'root-node', + nodes: [ { - meta: { - gradleProjectName: 'root-proj', - versionBuildInfo: { - gradleVersion: '6.5', + nodeId: 'root-node', + pkgId: 'gradle-monorepo@0.0.0', + deps: [], + }, + ], + }, + }); + const plugin = { + async inspect() { + return { + plugin: { + name: 'bundled:gradle', + runtime: 'unknown', + meta: {}, + }, + scannedProjects: [ + { + meta: { + gradleProjectName: 'root-proj', + versionBuildInfo: { + gradleVersion: '6.5', + }, }, + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - { - meta: { - gradleProjectName: 'root-proj/subproj', - versionBuildInfo: { - gradleVersion: '6.5', + { + meta: { + gradleProjectName: 'root-proj/subproj', + versionBuildInfo: { + gradleVersion: '6.5', + }, }, + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - ], - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); - loadPlugin.callThrough(); + ], + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); + loadPlugin.callThrough(); - const result = await params.cli.monitor('kotlin-monorepo', { - allProjects: true, - detectionDepth: 3, - }); - t.ok(loadPlugin.withArgs('rubygems').calledOnce, 'calls rubygems plugin'); - t.ok(loadPlugin.withArgs('gradle').calledOnce, 'calls gradle plugin'); + const result = await params.cli.monitor('kotlin-monorepo', { + allProjects: true, + detectionDepth: 3, + }); + t.ok( + loadPlugin.withArgs('rubygems').calledOnce, + 'calls rubygems plugin', + ); + t.ok(loadPlugin.withArgs('gradle').calledOnce, 'calls gradle plugin'); - t.match( - result, - 'gradle/graph/some/project-id', - 'gradle project was monitored', - ); - 
t.match( - result, - 'rubygems/graph/some/project-id', - 'rubygems project was monitored', - ); + t.match( + result, + 'gradle/graph/some/project-id', + 'gradle project was monitored', + ); + t.match( + result, + 'rubygems/graph/some/project-id', + 'rubygems project was monitored', + ); - const requests = params.server - .getRequests() - .filter((req) => req.url.includes('/monitor/')); - t.equal(requests.length, 3, 'correct amount of monitor requests'); - requests.forEach((req) => { + const requests = params.server + .getRequests() + .filter((req) => req.url.includes('/monitor/')); + t.equal(requests.length, 3, 'correct amount of monitor requests'); + requests.forEach((req) => { + t.match( + req.url, + /\/api\/v1\/monitor\/(rubygems\/graph|gradle\/graph)/, + 'puts at correct url', + ); + t.notOk(req.body.targetFile, "doesn't send the targetFile"); + t.equal(req.method, 'PUT', 'makes PUT request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + }); + }, + '`monitor mono-repo-poetry with --all-projects --detection-depth=2`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const result = await params.cli.monitor('mono-repo-poetry', { + allProjects: true, + detectionDepth: 2, + }); t.match( - req.url, - /\/api\/v1\/monitor\/(rubygems\/graph|gradle\/graph)/, - 'puts at correct url', + result, + 'npm/graph/some/project-id', + 'npm project was monitored ', ); - t.notOk(req.body.targetFile, "doesn't send the targetFile"); - t.equal(req.method, 'PUT', 'makes PUT request'); + t.match( + result, + 'poetry/graph/some/project-id', + 'poetry project was monitored ', + ); + const requests = params.server.popRequests(2); + requests.forEach((request) => { + const urlOk = + request.url === '/api/v1/monitor/npm' || + '/api/v1/monitor/poetry/graph'; + t.ok(urlOk, 'puts at correct url'); + t.equal(request.method, 'PUT', 'makes PUT request'); + t.equal( + request.headers['x-snyk-cli-version'], + 
params.versionNumber, + 'sends version number', + ); + }); + }, + 'monitor yarn-workspaces --all-projects --detection-depth=5 finds Yarn workspaces, Npm and Yarn projects': + (params, utils) => async (t) => { + t.teardown(() => { + loadPlugin.restore(); + }); + utils.chdirWorkspaces(); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + + const result = await params.cli.monitor('yarn-workspaces', { + allProjects: true, + detectionDepth: 5, + }); + // the parser is used directly t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + loadPlugin.withArgs('yarn').callCount, + 1, + 'loads plugin for yarn as we detect a Yarn projevct inside a workspace', ); - }); - }, - '`monitor mono-repo-poetry with --all-projects --detection-depth=2`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const result = await params.cli.monitor('mono-repo-poetry', { - allProjects: true, - detectionDepth: 2, - }); - t.match( - result, - 'npm/graph/some/project-id', - 'npm project was monitored ', - ); - t.match( - result, - 'poetry/graph/some/project-id', - 'poetry project was monitored ', - ); - const requests = params.server.popRequests(2); - requests.forEach((request) => { - const urlOk = - request.url === '/api/v1/monitor/npm' || - '/api/v1/monitor/poetry/graph'; - t.ok(urlOk, 'puts at correct url'); - t.equal(request.method, 'PUT', 'makes PUT request'); t.equal( - request.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + loadPlugin.withArgs('npm').callCount, + 1, + 'calls npm plugin once', ); - }); - }, - 'monitor yarn-workspaces --all-projects --detection-depth=5 finds Yarn workspaces, Npm and Yarn projects': ( - params, - utils, - ) => async (t) => { - t.teardown(() => { - loadPlugin.restore(); - }); - utils.chdirWorkspaces(); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - - const result = await params.cli.monitor('yarn-workspaces', { - allProjects: true, - 
detectionDepth: 5, - }); - // the parser is used directly - t.equal( - loadPlugin.withArgs('yarn').callCount, - 1, - 'loads plugin for yarn as we detect a Yarn projevct inside a workspace', - ); - t.equal(loadPlugin.withArgs('npm').callCount, 1, 'calls npm plugin once'); - t.match( - result, - 'Monitoring yarn-workspaces (package.json)', - 'yarn workspace root was monitored', - ); - t.match( - result, - 'Monitoring yarn-workspaces (apple-lib)', - 'yarn workspace was monitored', - ); - t.match( - result, - 'Monitoring yarn-workspaces (apples)', - 'yarn workspace was monitored', - ); - t.match( - result, - 'Monitoring yarn-workspaces (tomatoes)', - 'yarn workspace was monitored', - ); - t.match( - result, - 'Monitoring yarn-workspaces (not-in-a-workspace)', - 'npm project was monitored', - ); - t.match( - result, - 'Monitoring yarn-workspaces (not-part-of-workspace)', - 'yarn project was monitored', - ); - - const requests = params.server - .getRequests() - .filter((req) => req.url.includes('/monitor/')); - t.equal(requests.length, 6, 'correct amount of monitor requests'); - let policyCount = 0; - const applesWorkspace = - process.platform === 'win32' - ? '\\apples\\package.json' - : 'apples/package.json'; - const tomatoesWorkspace = - process.platform === 'win32' - ? '\\tomatoes\\package.json' - : 'tomatoes/package.json'; - const rootWorkspace = - process.platform === 'win32' - ? 
'\\yarn-workspaces\\package.json' - : 'yarn-workspaces/package.json'; - requests.forEach((req) => { t.match( - req.url, - /\/api\/v1\/monitor\/(yarn\/graph|npm\/graph)/, - 'puts at correct url', + result, + 'Monitoring yarn-workspaces (package.json)', + 'yarn workspace root was monitored', ); - t.equal(req.method, 'PUT', 'makes PUT request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + t.match( + result, + 'Monitoring yarn-workspaces (apple-lib)', + 'yarn workspace was monitored', ); - if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { + t.match( + result, + 'Monitoring yarn-workspaces (apples)', + 'yarn workspace was monitored', + ); + t.match( + result, + 'Monitoring yarn-workspaces (tomatoes)', + 'yarn workspace was monitored', + ); + t.match( + result, + 'Monitoring yarn-workspaces (not-in-a-workspace)', + 'npm project was monitored', + ); + t.match( + result, + 'Monitoring yarn-workspaces (not-part-of-workspace)', + 'yarn project was monitored', + ); + + const requests = params.server + .getRequests() + .filter((req) => req.url.includes('/monitor/')); + t.equal(requests.length, 6, 'correct amount of monitor requests'); + let policyCount = 0; + const applesWorkspace = + process.platform === 'win32' + ? '\\apples\\package.json' + : 'apples/package.json'; + const tomatoesWorkspace = + process.platform === 'win32' + ? '\\tomatoes\\package.json' + : 'tomatoes/package.json'; + const rootWorkspace = + process.platform === 'win32' + ? 
'\\yarn-workspaces\\package.json' + : 'yarn-workspaces/package.json'; + requests.forEach((req) => { t.match( - req.body.policy, - 'npm:node-uuid:20160328', - 'policy is as expected', + req.url, + /\/api\/v1\/monitor\/(yarn\/graph|npm\/graph)/, + 'puts at correct url', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } else if ( - req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) - ) { - t.notOk(req.body.policy, 'body does not contain policy'); - } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { - t.match( - req.body.policy, - 'npm:node-uuid:20111130', - 'policy is as expected', + t.equal(req.method, 'PUT', 'makes PUT request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } - }); - t.equal(policyCount, 2, '2 policies found in a workspace'); - }, + if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20160328', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } else if ( + req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) + ) { + t.notOk(req.body.policy, 'body does not contain policy'); + } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20111130', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } + }); + t.equal(policyCount, 2, '2 policies found in a workspace'); + }, }, }; diff --git a/test/tap/cli-test/cli-test.all-projects.spec.ts b/test/tap/cli-test/cli-test.all-projects.spec.ts index 5188458327..6d4a0e1bac 100644 --- a/test/tap/cli-test/cli-test.all-projects.spec.ts +++ b/test/tap/cli-test/cli-test.all-projects.spec.ts @@ -32,322 +32,317 @@ const simpleGradleGraph = depGraphLib.createFromJSON({ export const AllProjectsTests: AcceptanceTests 
= { language: 'Mixed', tests: { - '`test gradle-with-orphaned-build-file --all-projects` warns user': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - plugin: { - name: 'bundled:gradle', - runtime: 'unknown', - meta: {}, - }, - scannedProjects: [ - { - meta: { - gradleProjectName: 'root-proj', - versionBuildInfo: { - gradleVersion: '6.5', + '`test gradle-with-orphaned-build-file --all-projects` warns user': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + plugin: { + name: 'bundled:gradle', + runtime: 'unknown', + meta: {}, + }, + scannedProjects: [ + { + meta: { + gradleProjectName: 'root-proj', + versionBuildInfo: { + gradleVersion: '6.5', + }, + targetFile: 'build.gradle', }, - targetFile: 'build.gradle', + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - { - meta: { - gradleProjectName: 'root-proj/subproj', - versionBuildInfo: { - gradleVersion: '6.5', + { + meta: { + gradleProjectName: 'root-proj/subproj', + versionBuildInfo: { + gradleVersion: '6.5', + }, + targetFile: 'subproj/build.gradle', }, - targetFile: 'subproj/build.gradle', + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - ], - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - loadPlugin.returns(plugin); - t.teardown(loadPlugin.restore); + ], + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + loadPlugin.returns(plugin); + t.teardown(loadPlugin.restore); - // read data from console.log - let stdoutMessages = ''; - const stubConsoleLog = (msg: string) => (stdoutMessages += msg); - const stubbedConsole = sinon - .stub(console, 'warn') - .callsFake(stubConsoleLog); - const result: CommandResult = await params.cli.test( - 'gradle-with-orphaned-build-file', - { - allProjects: true, - detectionDepth: 3, - }, - ); - t.same( - stdoutMessages, - `${icon.ISSUE} 1/3 detected 
Gradle manifests did not return dependencies. ` + - 'They may have errored or were not included as part of a multi-project build. You may need to scan them individually with --file=path/to/file. Run with `-d` for more info.', - ); - stubbedConsole.restore(); - t.ok(stubbedConsole.calledOnce); - t.ok(loadPlugin.withArgs('gradle').calledOnce, 'calls gradle plugin'); + // read data from console.log + let stdoutMessages = ''; + const stubConsoleLog = (msg: string) => (stdoutMessages += msg); + const stubbedConsole = sinon + .stub(console, 'warn') + .callsFake(stubConsoleLog); + const result: CommandResult = await params.cli.test( + 'gradle-with-orphaned-build-file', + { + allProjects: true, + detectionDepth: 3, + }, + ); + t.same( + stdoutMessages, + `${icon.ISSUE} 1/3 detected Gradle manifests did not return dependencies. ` + + 'They may have errored or were not included as part of a multi-project build. You may need to scan them individually with --file=path/to/file. Run with `-d` for more info.', + ); + stubbedConsole.restore(); + t.ok(stubbedConsole.calledOnce); + t.ok(loadPlugin.withArgs('gradle').calledOnce, 'calls gradle plugin'); - t.match( - result.getDisplayResults(), - 'Tested 2 projects', - 'Detected 2 projects', - ); - }, - '`test kotlin-monorepo --all-projects` scans kotlin files': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - plugin: { - name: 'bundled:gradle', - runtime: 'unknown', - meta: {}, - }, - scannedProjects: [ - { - meta: { - gradleProjectName: 'root-proj', - versionBuildInfo: { - gradleVersion: '6.5', + t.match( + result.getDisplayResults(), + 'Tested 2 projects', + 'Detected 2 projects', + ); + }, + '`test kotlin-monorepo --all-projects` scans kotlin files': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + plugin: { + name: 'bundled:gradle', + runtime: 'unknown', + meta: {}, + }, + scannedProjects: [ + 
{ + meta: { + gradleProjectName: 'root-proj', + versionBuildInfo: { + gradleVersion: '6.5', + }, }, + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - { - meta: { - gradleProjectName: 'root-proj/subproj', - versionBuildInfo: { - gradleVersion: '6.5', + { + meta: { + gradleProjectName: 'root-proj/subproj', + versionBuildInfo: { + gradleVersion: '6.5', + }, }, + depGraph: simpleGradleGraph, }, - depGraph: simpleGradleGraph, - }, - ], - }; - }, - }; - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - loadPlugin.withArgs('gradle').returns(plugin); - loadPlugin.callThrough(); - t.teardown(loadPlugin.restore); + ], + }; + }, + }; + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + loadPlugin.withArgs('gradle').returns(plugin); + loadPlugin.callThrough(); + t.teardown(loadPlugin.restore); - const result: CommandResult = await params.cli.test('kotlin-monorepo', { - allProjects: true, - detectionDepth: 3, - }); - t.ok(loadPlugin.withArgs('rubygems').calledOnce, 'calls rubygems plugin'); - t.ok(loadPlugin.withArgs('gradle').calledOnce, 'calls gradle plugin'); + const result: CommandResult = await params.cli.test('kotlin-monorepo', { + allProjects: true, + detectionDepth: 3, + }); + t.ok( + loadPlugin.withArgs('rubygems').calledOnce, + 'calls rubygems plugin', + ); + t.ok(loadPlugin.withArgs('gradle').calledOnce, 'calls gradle plugin'); - const backendRequests = params.server.popRequests(2); - t.equal(backendRequests.length, 2); + const backendRequests = params.server.popRequests(2); + t.equal(backendRequests.length, 2); - backendRequests.forEach((req) => { - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + backendRequests.forEach((req) => { + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, 
'/api/v1/test-dep-graph', 'posts to correct url'); + t.ok(req.body.depGraph, 'body contains depGraph'); + t.match( + req.body.depGraph.pkgManager.name, + /(gradle|rubygems)/, + 'depGraph has package manager', + ); + }); + t.match( + result.getDisplayResults(), + 'Tested 3 projects', + 'Detected 3 projects', ); - t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); - t.ok(req.body.depGraph, 'body contains depGraph'); t.match( - req.body.depGraph.pkgManager.name, - /(gradle|rubygems)/, - 'depGraph has package manager', + result.getDisplayResults(), + 'Package manager: rubygems', + 'contains package manager rubygems', + ); + t.match( + result.getDisplayResults(), + 'Package manager: gradle', + 'contains package manager gradle', + ); + t.match( + result.getDisplayResults(), + 'Target file: Gemfile.lock', + 'contains target file Gemfile.lock', + ); + t.match( + result.getDisplayResults(), + 'Target file: build.gradle.kts', + 'contains target file build.gradle.kts', + ); + }, + 'test yarn-workspaces-v2-resolutions --all-projects --detection-depth=5 --strict-out-of-sync=false (yarn v2 with resolutions)': + (params, utils) => async (t) => { + // Yarn workspaces for Yarn 2 is only supported on Node 10+ + utils.chdirWorkspaces(); + const result = await params.cli.test('yarn-workspaces-v2-resolutions', { + allProjects: true, + detectionDepth: 5, + strictOutOfSync: false, + printDeps: true, + }); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + // the parser is used directly + t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); + t.teardown(() => { + loadPlugin.restore(); + }); + t.match( + result.getDisplayResults(), + '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', + 'correctly showing dep number', + ); + t.match(result.getDisplayResults(), 'Package manager: yarn\n'); + t.match( + result.getDisplayResults(), + 'Project name: package.json', + 'yarn project in output', + ); + t.match( + 
result.getDisplayResults(), + 'Project name: tomatoes', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: apples', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Tested 3 projects, no vulnerable paths were found.', + 'no vulnerable paths found as both policies detected and applied.', + ); + }, + 'test --all-projects --detection-depth=5 finds Yarn workspaces & Npm projects': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const result = await params.cli.test('yarn-workspaces', { + allProjects: true, + detectionDepth: 5, + }); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + // the parser is used directly + t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); + t.teardown(() => { + loadPlugin.restore(); + }); + const output = result.getDisplayResults(); + t.match(output, 'Package manager: yarn\n'); + t.match(output, 'Package manager: npm\n'); + t.match( + output, + 'Target file: not-part-of-workspace/package-lock.json', + 'npm project in outside of yarn workspace is in output', + ); + t.match( + output, + 'Target file: not-part-of-workspace-yarn/yarn.lock', + 'yarn project outside of workspace is in the output', + ); + t.match( + output, + 'Project name: package.json', + 'yarn project in output', + ); + t.match( + output, + 'Project name: tomatoes', + 'workspace yarn project in output', + ); + t.match( + output, + 'Project name: apples', + 'workspace yarn project in output', + ); + t.match( + output, + 'Project name: apple-lib', + 'workspace yarn project in output', ); - }); - t.match( - result.getDisplayResults(), - 'Tested 3 projects', - 'Detected 3 projects', - ); - t.match( - result.getDisplayResults(), - 'Package manager: rubygems', - 'contains package manager rubygems', - ); - t.match( - result.getDisplayResults(), - 'Package manager: gradle', - 'contains package manager gradle', - ); - t.match( - result.getDisplayResults(), - 'Target file: 
Gemfile.lock', - 'contains target file Gemfile.lock', - ); - t.match( - result.getDisplayResults(), - 'Target file: build.gradle.kts', - 'contains target file build.gradle.kts', - ); - }, - 'test yarn-workspaces-v2-resolutions --all-projects --detection-depth=5 --strict-out-of-sync=false (yarn v2 with resolutions)': ( - params, - utils, - ) => async (t) => { - // Yarn workspaces for Yarn 2 is only supported on Node 10+ - utils.chdirWorkspaces(); - const result = await params.cli.test('yarn-workspaces-v2-resolutions', { - allProjects: true, - detectionDepth: 5, - strictOutOfSync: false, - printDeps: true, - }); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - // the parser is used directly - t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); - t.teardown(() => { - loadPlugin.restore(); - }); - t.match( - result.getDisplayResults(), - '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', - 'correctly showing dep number', - ); - t.match(result.getDisplayResults(), 'Package manager: yarn\n'); - t.match( - result.getDisplayResults(), - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: tomatoes', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: apples', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Tested 3 projects, no vulnerable paths were found.', - 'no vulnerable paths found as both policies detected and applied.', - ); - }, - 'test --all-projects --detection-depth=5 finds Yarn workspaces & Npm projects': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const result = await params.cli.test('yarn-workspaces', { - allProjects: true, - detectionDepth: 5, - }); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - // the parser is used directly - t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); - t.teardown(() => { - 
loadPlugin.restore(); - }); - const output = result.getDisplayResults(); - t.match(output, 'Package manager: yarn\n'); - t.match(output, 'Package manager: npm\n'); - t.match( - output, - 'Target file: not-part-of-workspace/package-lock.json', - 'npm project in outside of yarn workspace is in output', - ); - t.match( - output, - 'Target file: not-part-of-workspace-yarn/yarn.lock', - 'yarn project outside of workspace is in the output', - ); - t.match( - output, - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - output, - 'Project name: tomatoes', - 'workspace yarn project in output', - ); - t.match( - output, - 'Project name: apples', - 'workspace yarn project in output', - ); - t.match( - output, - 'Project name: apple-lib', - 'workspace yarn project in output', - ); - - t.match( - output, - 'Tested 6 projects, no vulnerable paths were found.', - 'tested 4 workspace projects, 1 npm project and 1 yarn project', - ); - let policyCount = 0; - const applesWorkspace = - process.platform === 'win32' - ? '\\apples\\package.json' - : 'apples/package.json'; - const tomatoesWorkspace = - process.platform === 'win32' - ? '\\tomatoes\\package.json' - : 'tomatoes/package.json'; - const rootWorkspace = - process.platform === 'win32' - ? '\\yarn-workspaces\\package.json' - : 'yarn-workspaces/package.json'; - - const backendRequests = params.server.popRequests(6); - t.equal(backendRequests.length, 6); - backendRequests.forEach((req) => { - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + t.match( + output, + 'Tested 6 projects, no vulnerable paths were found.', + 'tested 4 workspace projects, 1 npm project and 1 yarn project', ); - t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); - t.ok(req.body.depGraph, 'body contains depGraph'); + let policyCount = 0; + const applesWorkspace = + process.platform === 'win32' + ? 
'\\apples\\package.json' + : 'apples/package.json'; + const tomatoesWorkspace = + process.platform === 'win32' + ? '\\tomatoes\\package.json' + : 'tomatoes/package.json'; + const rootWorkspace = + process.platform === 'win32' + ? '\\yarn-workspaces\\package.json' + : 'yarn-workspaces/package.json'; - if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { - t.match( - req.body.policy, - 'npm:node-uuid:20160328', - 'policy is as expected', + const backendRequests = params.server.popRequests(6); + t.equal(backendRequests.length, 6); + + backendRequests.forEach((req) => { + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } else if ( - req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) - ) { - t.notOk(req.body.policy, 'body does not contain policy'); - } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { + t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); + t.ok(req.body.depGraph, 'body contains depGraph'); + + if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20160328', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } else if ( + req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) + ) { + t.notOk(req.body.policy, 'body does not contain policy'); + } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20111130', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } t.match( - req.body.policy, - 'npm:node-uuid:20111130', - 'policy is as expected', + req.body.depGraph.pkgManager.name, + /(yarn|npm)/, + 'depGraph has package manager', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } - 
t.match( - req.body.depGraph.pkgManager.name, - /(yarn|npm)/, - 'depGraph has package manager', - ); - }); - t.equal(policyCount, 2, '2 policies found in a workspace'); - }, + }); + t.equal(policyCount, 2, '2 policies found in a workspace'); + }, }, }; diff --git a/test/tap/cli-test/cli-test.composer.spec.ts b/test/tap/cli-test/cli-test.composer.spec.ts index 3572b4662d..f388b669f0 100644 --- a/test/tap/cli-test/cli-test.composer.spec.ts +++ b/test/tap/cli-test/cli-test.composer.spec.ts @@ -5,274 +5,268 @@ import { AcceptanceTests } from '../cli-test.acceptance.test'; export const ComposerTests: AcceptanceTests = { language: 'Composer', tests: { - '`test composer-app --file=composer.lock`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + '`test composer-app --file=composer.lock`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('composer').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('composer').returns(plugin); + await params.cli.test('composer-app', { + file: 'composer.lock', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'composer'); + t.same( + 
spyPlugin.getCall(0).args, + [ + 'composer-app', + 'composer.lock', + { + args: null, + file: 'composer.lock', + org: null, + projectName: null, + packageManager: 'composer', + path: 'composer-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls composer plugin', + ); + }, - await params.cli.test('composer-app', { - file: 'composer.lock', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'composer'); - t.same( - spyPlugin.getCall(0).args, - [ - 'composer-app', - 'composer.lock', - { - args: null, - file: 'composer.lock', - org: null, - projectName: null, - packageManager: 'composer', - path: 'composer-app', - showVulnPaths: 'some', + '`test composer-app` auto-detects composer.lock': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; }, - snykHttpClient, - ], - 'calls composer plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test composer-app` auto-detects composer.lock': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('composer').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('composer').returns(plugin); + await params.cli.test('composer-app'); + const req = 
params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'composer'); + t.same( + spyPlugin.getCall(0).args, + [ + 'composer-app', + 'composer.lock', + { + args: null, + file: 'composer.lock', + org: null, + projectName: null, + packageManager: 'composer', + path: 'composer-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls composer plugin', + ); + }, - await params.cli.test('composer-app'); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'composer'); - t.same( - spyPlugin.getCall(0).args, - [ - 'composer-app', - 'composer.lock', - { - args: null, - file: 'composer.lock', - org: null, - projectName: null, - packageManager: 'composer', - path: 'composer-app', - showVulnPaths: 'some', + '`test composer-app --file=composer.lock --dev`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; }, - snykHttpClient, - ], - 'calls composer plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test composer-app --file=composer.lock --dev`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + 
t.teardown(loadPlugin.restore); + loadPlugin.withArgs('composer').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('composer').returns(plugin); + await params.cli.test('composer-app', { + file: 'composer.lock', + dev: true, + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'composer'); + t.same( + spyPlugin.getCall(0).args, + [ + 'composer-app', + 'composer.lock', + { + args: null, + dev: true, + file: 'composer.lock', + org: null, + projectName: null, + packageManager: 'composer', + path: 'composer-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls composer plugin', + ); + }, - await params.cli.test('composer-app', { - file: 'composer.lock', - dev: true, - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'composer'); - t.same( - spyPlugin.getCall(0).args, - [ - 'composer-app', - 'composer.lock', - { - args: null, - dev: true, - file: 'composer.lock', - org: null, - projectName: null, - packageManager: 'composer', - path: 'composer-app', - showVulnPaths: 'some', + '`test composer-app golang-app nuget-app` auto-detects all three projects': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; }, - snykHttpClient, - ], - 'calls composer plugin', - ); - }, - - '`test 
composer-app golang-app nuget-app` auto-detects all three projects': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('composer').returns(plugin); - loadPlugin.withArgs('golangdep').returns(plugin); - loadPlugin.withArgs('nuget').returns(plugin); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('composer').returns(plugin); + loadPlugin.withArgs('golangdep').returns(plugin); + loadPlugin.withArgs('nuget').returns(plugin); - await params.cli.test('composer-app', 'golang-app', 'nuget-app', { - org: 'test-org', - }); - // assert three API calls made - const reqs = params.server - .getRequests() - .filter((r) => r.url === '/api/v1/test-dep-graph?org=test-org'); + await params.cli.test('composer-app', 'golang-app', 'nuget-app', { + org: 'test-org', + }); + // assert three API calls made + const reqs = params.server + .getRequests() + .filter((r) => r.url === '/api/v1/test-dep-graph?org=test-org'); - t.same( - reqs.map((r) => r.method), - ['POST', 'POST', 'POST'], - 'all post requests', - ); + t.same( + reqs.map((r) => r.method), + ['POST', 'POST', 'POST'], + 'all post requests', + ); - t.same( - reqs.map((r) => r.headers['x-snyk-cli-version']), - [params.versionNumber, params.versionNumber, params.versionNumber], - 'all send version number', - ); + t.same( + reqs.map((r) => r.headers['x-snyk-cli-version']), + [params.versionNumber, params.versionNumber, params.versionNumber], + 'all send version number', + ); - t.equal(reqs.length, 3, 'all urls are present'); + t.equal(reqs.length, 3, 'all urls are present'); 
- t.same( - reqs.map((r) => r.body.depGraph.pkgManager.name).sort(), - ['composer', 'golangdep', 'nuget'], - 'all urls are present', - ); + t.same( + reqs.map((r) => r.body.depGraph.pkgManager.name).sort(), + ['composer', 'golangdep', 'nuget'], + 'all urls are present', + ); - // assert three spyPlugin calls, each with a different app - const calls = spyPlugin.getCalls().sort((call1: any, call2: any) => { - return call1.args[0] < call2.args[1] - ? -1 - : call1.args[0] > call2.args[0] - ? 1 - : 0; - }); - t.same( - calls[0].args, - [ - 'composer-app', - 'composer.lock', - { - args: null, - org: 'test-org', - file: 'composer.lock', - projectName: null, - packageManager: 'composer', - path: 'composer-app', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls composer plugin', - ); - t.same( - calls[1].args, - [ - 'golang-app', - 'Gopkg.lock', - { - args: null, - org: 'test-org', - file: 'Gopkg.lock', - projectName: null, - packageManager: 'golangdep', - path: 'golang-app', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls golangdep plugin', - ); - t.same( - calls[2].args, - [ - 'nuget-app', - 'project.assets.json', - { - args: null, - org: 'test-org', - file: 'project.assets.json', - projectName: null, - packageManager: 'nuget', - path: 'nuget-app', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + // assert three spyPlugin calls, each with a different app + const calls = spyPlugin.getCalls().sort((call1: any, call2: any) => { + return call1.args[0] < call2.args[1] + ? -1 + : call1.args[0] > call2.args[0] + ? 
1 + : 0; + }); + t.same( + calls[0].args, + [ + 'composer-app', + 'composer.lock', + { + args: null, + org: 'test-org', + file: 'composer.lock', + projectName: null, + packageManager: 'composer', + path: 'composer-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls composer plugin', + ); + t.same( + calls[1].args, + [ + 'golang-app', + 'Gopkg.lock', + { + args: null, + org: 'test-org', + file: 'Gopkg.lock', + projectName: null, + packageManager: 'golangdep', + path: 'golang-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golangdep plugin', + ); + t.same( + calls[2].args, + [ + 'nuget-app', + 'project.assets.json', + { + args: null, + org: 'test-org', + file: 'project.assets.json', + projectName: null, + packageManager: 'nuget', + path: 'nuget-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.docker.spec.ts b/test/tap/cli-test/cli-test.docker.spec.ts index 81bdd430c6..8de11c5819 100644 --- a/test/tap/cli-test/cli-test.docker.spec.ts +++ b/test/tap/cli-test/cli-test.docker.spec.ts @@ -71,20 +71,55 @@ export const DockerTests: AcceptanceTests = { packageManager: null, path: 'foo:latest', showVulnPaths: 'some', + maxVulnPaths: undefined, }, ], 'calls docker plugin with expected arguments', ); }, - '`test foo:latest --docker --platform=linux/amd64`': (params) => async ( - t, - ) => { - const spyPlugin = stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { + '`test foo:latest --docker --platform=linux/amd64`': + (params) => async (t) => { + const spyPlugin = stubDockerPluginResponse( + params.ecoSystemPlugins, + { + scanResults: [ + { + facts: [ + { type: 'depGraph', data: {} }, + { type: 'dockerfileAnalysis', data: {} }, + ], + identity: { + type: 'deb', + args: { + platform: 'linux/amd64', + }, + }, + target: { + image: 'docker-image|ubuntu', 
+ }, + }, + ], + }, + t, + ); + + await params.cli.test('foo:latest', { + docker: true, + org: 'explicit-org', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dependencies', 'posts to correct url'); + t.same( + req.body, + { + scanResult: { facts: [ { type: 'depGraph', data: {} }, { type: 'dockerfileAnalysis', data: {} }, @@ -99,60 +134,26 @@ export const DockerTests: AcceptanceTests = { image: 'docker-image|ubuntu', }, }, - ], - }, - t, - ); - - await params.cli.test('foo:latest', { - docker: true, - org: 'explicit-org', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dependencies', 'posts to correct url'); - t.same( - req.body, - { - scanResult: { - facts: [ - { type: 'depGraph', data: {} }, - { type: 'dockerfileAnalysis', data: {} }, - ], - identity: { - type: 'deb', - args: { - platform: 'linux/amd64', - }, - }, - target: { - image: 'docker-image|ubuntu', - }, }, - }, - 'sends correct payload', - ); - t.same( - spyPlugin.getCall(0).args, - [ - { - docker: true, - 'exclude-app-vulns': false, - org: 'explicit-org', - projectName: null, - packageManager: null, - path: 'foo:latest', - showVulnPaths: 'some', - }, - ], - 'calls docker plugin with expected arguments', - ); - }, + 'sends correct payload', + ); + t.same( + spyPlugin.getCall(0).args, + [ + { + docker: true, + 'exclude-app-vulns': false, + org: 'explicit-org', + projectName: null, + packageManager: null, + path: 'foo:latest', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + ], + 'calls docker plugin with expected arguments', + ); + }, '`test foo:latest --docker vulnerable paths`': (params) => async (t) => { stubDockerPluginResponse( @@ 
-301,64 +302,94 @@ export const DockerTests: AcceptanceTests = { packageManager: null, path: 'foo:latest', showVulnPaths: 'some', + maxVulnPaths: undefined, }, ], 'calls docker plugin with expected arguments', ); }, - '`test foo:latest --docker --file=Dockerfile remediation advice`': ( - params, - ) => async (t) => { - stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { - facts: [ - { type: 'depGraph', data: {} }, - { type: 'dockerfileAnalysis', data: {} }, - ], - identity: { - type: 'deb', + '`test foo:latest --docker --file=Dockerfile remediation advice`': + (params) => async (t) => { + stubDockerPluginResponse( + params.ecoSystemPlugins, + { + scanResults: [ + { + facts: [ + { type: 'depGraph', data: {} }, + { type: 'dockerfileAnalysis', data: {} }, + ], + identity: { + type: 'deb', + }, + target: { + image: 'docker-image|ubuntu', + }, }, - target: { - image: 'docker-image|ubuntu', + ], + }, + t, + ); + const vulns = require( + getFixturePath('docker/find-result-remediation.json'), + ); + params.server.setNextResponse(vulns); + + try { + await params.cli.test('foo:latest', { + docker: true, + org: 'explicit-org', + file: 'Dockerfile', + }); + t.fail('should have found vuln'); + } catch (err) { + const msg = err.message; + t.match(msg, 'Base Image'); + t.match(msg, 'Recommendations for base image upgrade'); + } + }, + + '`test foo:latest --docker` doesnt collect policy from cwd': + (params, utils) => async (t) => { + utils.chdirWorkspaces('npm-package-policy'); + const spyPlugin = stubDockerPluginResponse( + params.ecoSystemPlugins, + { + scanResults: [ + { + facts: [ + { type: 'depGraph', data: {} }, + { type: 'dockerfileAnalysis', data: {} }, + ], + identity: { + type: 'deb', + }, + target: { + image: 'docker-image|ubuntu', + }, }, - }, - ], - }, - t, - ); - const vulns = require(getFixturePath( - 'docker/find-result-remediation.json', - )); - params.server.setNextResponse(vulns); + ], + }, + t, + ); - try { await 
params.cli.test('foo:latest', { docker: true, org: 'explicit-org', - file: 'Dockerfile', }); - t.fail('should have found vuln'); - } catch (err) { - const msg = err.message; - t.match(msg, 'Base Image'); - t.match(msg, 'Recommendations for base image upgrade'); - } - }, - - '`test foo:latest --docker` doesnt collect policy from cwd': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces('npm-package-policy'); - const spyPlugin = stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dependencies', 'posts to correct url'); + t.same( + req.body, + { + scanResult: { facts: [ { type: 'depGraph', data: {} }, { type: 'dockerfileAnalysis', data: {} }, @@ -370,74 +401,69 @@ export const DockerTests: AcceptanceTests = { image: 'docker-image|ubuntu', }, }, - ], - }, - t, - ); - - await params.cli.test('foo:latest', { - docker: true, - org: 'explicit-org', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dependencies', 'posts to correct url'); - t.same( - req.body, - { - scanResult: { - facts: [ - { type: 'depGraph', data: {} }, - { type: 'dockerfileAnalysis', data: {} }, - ], - identity: { - type: 'deb', - }, - target: { - image: 'docker-image|ubuntu', - }, }, - }, - 'sends correct payload', - ); - t.same( - spyPlugin.getCall(0).args, - [ + 'sends correct payload', + ); + t.same( + spyPlugin.getCall(0).args, + [ + { + docker: true, + 'exclude-app-vulns': false, + org: 'explicit-org', + projectName: null, + packageManager: null, + path: 'foo:latest', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + ], + 'calls docker plugin with 
expected arguments', + ); + const policyString = req.body.scanResult.policy; + t.notOk(policyString, 'policy not sent'); + }, + + '`test foo:latest --docker` supports custom policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const policyString = fs.readFileSync( + path.join('npm-package-policy/custom-location', '.snyk'), + 'utf8', + ); + const spyPlugin = stubDockerPluginResponse( + params.ecoSystemPlugins, { - docker: true, - 'exclude-app-vulns': false, - org: 'explicit-org', - projectName: null, - packageManager: null, - path: 'foo:latest', - showVulnPaths: 'some', + scanResults: [ + { + facts: [ + { type: 'depGraph', data: {} }, + { type: 'dockerfileAnalysis', data: {} }, + ], + identity: { + type: 'deb', + }, + target: { + image: 'docker-image|ubuntu', + }, + policy: policyString, + }, + ], }, - ], - 'calls docker plugin with expected arguments', - ); - const policyString = req.body.scanResult.policy; - t.notOk(policyString, 'policy not sent'); - }, + t, + ); - '`test foo:latest --docker` supports custom policy': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const policyString = fs.readFileSync( - path.join('npm-package-policy/custom-location', '.snyk'), - 'utf8', - ); - const spyPlugin = stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { + await params.cli.test('foo:latest', { + docker: true, + org: 'explicit-org', + 'policy-path': 'npm-package-policy/custom-location', + }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dependencies', 'posts to correct url'); + t.same( + req.body, + { + scanResult: { facts: [ { type: 'depGraph', data: {} }, { type: 'dockerfileAnalysis', data: {} }, @@ -450,54 +476,27 @@ export const DockerTests: AcceptanceTests = { }, policy: policyString, }, - ], - }, - t, - ); - - await params.cli.test('foo:latest', { - docker: true, - org: 'explicit-org', - 'policy-path': 'npm-package-policy/custom-location', - }); - const req = 
params.server.popRequest(); - t.match(req.url, '/test-dependencies', 'posts to correct url'); - t.same( - req.body, - { - scanResult: { - facts: [ - { type: 'depGraph', data: {} }, - { type: 'dockerfileAnalysis', data: {} }, - ], - identity: { - type: 'deb', - }, - target: { - image: 'docker-image|ubuntu', - }, - policy: policyString, }, - }, - 'sends correct payload', - ); - t.same( - spyPlugin.getCall(0).args, - [ - { - docker: true, - 'exclude-app-vulns': false, - org: 'explicit-org', - projectName: null, - packageManager: null, - path: 'foo:latest', - showVulnPaths: 'some', - 'policy-path': 'npm-package-policy/custom-location', - }, - ], - 'calls docker plugin with expected arguments', - ); - }, + 'sends correct payload', + ); + t.same( + spyPlugin.getCall(0).args, + [ + { + docker: true, + 'exclude-app-vulns': false, + org: 'explicit-org', + projectName: null, + packageManager: null, + path: 'foo:latest', + showVulnPaths: 'some', + maxVulnPaths: undefined, + 'policy-path': 'npm-package-policy/custom-location', + }, + ], + 'calls docker plugin with expected arguments', + ); + }, '`test foo:latest --docker with binaries`': (params) => async (t) => { const spyPlugin = stubDockerPluginResponse( @@ -574,224 +573,220 @@ export const DockerTests: AcceptanceTests = { packageManager: null, path: 'foo:latest', showVulnPaths: 'some', + maxVulnPaths: undefined, }, ], 'calls docker plugin with expected arguments', ); }, - '`test foo:latest --docker with binaries vulnerabilities`': ( - params, - ) => async (t) => { - stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { - facts: [ - { type: 'depGraph', data: {} }, - { type: 'dockerfileAnalysis', data: {} }, - { - type: 'keyBinariesHashes', - data: [ - '9191fbcdcc737314df97c5016a841199b743ac3fa9959dfade38e17bfdaf30b5', - ], + '`test foo:latest --docker with binaries vulnerabilities`': + (params) => async (t) => { + stubDockerPluginResponse( + params.ecoSystemPlugins, + { + scanResults: [ + { + facts: 
[ + { type: 'depGraph', data: {} }, + { type: 'dockerfileAnalysis', data: {} }, + { + type: 'keyBinariesHashes', + data: [ + '9191fbcdcc737314df97c5016a841199b743ac3fa9959dfade38e17bfdaf30b5', + ], + }, + ], + identity: { + type: 'deb', + }, + target: { + image: 'docker-image|ubuntu', }, - ], - identity: { - type: 'deb', - }, - target: { - image: 'docker-image|ubuntu', }, - }, - ], - }, - t, - ); - - const vulns = require(getFixturePath('docker/find-result-binaries.json')); - params.server.setNextResponse(vulns); - - try { - await params.cli.test('foo:latest', { - docker: true, - org: 'explicit-org', - }); - t.fail('should have found vuln'); - } catch (err) { - const msg = err.message; - t.match( - msg, - 'Tested 3 dependencies for known vulnerabilities, found 3 vulnerabilities', - ); - t.match(msg, 'From: bzip2/libbz2-1.0@1.0.6-8.1'); - t.match( - msg, - 'From: apt/libapt-pkg5.0@1.6.3ubuntu0.1 > bzip2/libbz2-1.0@1.0.6-8.1', - ); - t.match( - msg, - 'Info: https://security.snyk.io/vuln/SNYK-UPSTREAM-NODE-72359', + ], + }, + t, ); - t.notOk( - msg.includes('vulnerable paths'), - 'docker should not includes number of vulnerable paths', + + const vulns = require( + getFixturePath('docker/find-result-binaries.json'), ); - t.match(msg, 'Detected 2 vulnerabilities for node@5.10.1'); - t.match(msg, 'High severity vulnerability found in node'); - t.match(msg, 'Fixed in: 5.13.1'); - t.match(msg, 'Fixed in: 5.15.1'); - } - }, + params.server.setNextResponse(vulns); - '`test foo:latest --docker with dockerfile instructions`': ( - params, - ) => async (t) => { - stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { - facts: [ - { type: 'depGraph', data: {} }, - { - type: 'dockerfileAnalysis', - data: { - dockerfilePackages: { - bzip2: { - installCommand: 'test installCommand', + try { + await params.cli.test('foo:latest', { + docker: true, + org: 'explicit-org', + }); + t.fail('should have found vuln'); + } catch (err) { + const msg = err.message; + 
t.match( + msg, + 'Tested 3 dependencies for known vulnerabilities, found 3 vulnerabilities', + ); + t.match(msg, 'From: bzip2/libbz2-1.0@1.0.6-8.1'); + t.match( + msg, + 'From: apt/libapt-pkg5.0@1.6.3ubuntu0.1 > bzip2/libbz2-1.0@1.0.6-8.1', + ); + t.match( + msg, + 'Info: https://security.snyk.io/vuln/SNYK-UPSTREAM-NODE-72359', + ); + t.notOk( + msg.includes('vulnerable paths'), + 'docker should not includes number of vulnerable paths', + ); + t.match(msg, 'Detected 2 vulnerabilities for node@5.10.1'); + t.match(msg, 'High severity vulnerability found in node'); + t.match(msg, 'Fixed in: 5.13.1'); + t.match(msg, 'Fixed in: 5.15.1'); + } + }, + + '`test foo:latest --docker with dockerfile instructions`': + (params) => async (t) => { + stubDockerPluginResponse( + params.ecoSystemPlugins, + { + scanResults: [ + { + facts: [ + { type: 'depGraph', data: {} }, + { + type: 'dockerfileAnalysis', + data: { + dockerfilePackages: { + bzip2: { + installCommand: 'test installCommand', + }, }, }, }, + ], + identity: { + type: 'deb', + }, + target: { + image: 'docker-image|ubuntu', }, - ], - identity: { - type: 'deb', - }, - target: { - image: 'docker-image|ubuntu', }, - }, - ], - }, - t, - ); - - const vulns = require(getFixturePath( - 'docker/find-result-remediation.json', - )); - params.server.setNextResponse(vulns); + ], + }, + t, + ); - try { - await params.cli.test('foo:latest', { - docker: true, - org: 'explicit-org', - }); - t.fail('should have found vuln'); - } catch (err) { - const msg = err.message; - t.match(msg, "Image layer: 'test installCommand'"); - } - }, + const vulns = require( + getFixturePath('docker/find-result-remediation.json'), + ); + params.server.setNextResponse(vulns); - '`test foo:latest --docker with auto detected instructions`': ( - params, - ) => async (t) => { - stubDockerPluginResponse( - params.ecoSystemPlugins, - { - scanResults: [ - { - facts: [ - { type: 'depGraph', data: {} }, - { - type: 'autoDetectedUserInstructions', - data: { - 
dockerfilePackages: { - bzip2: { - installCommand: 'test installCommand', + try { + await params.cli.test('foo:latest', { + docker: true, + org: 'explicit-org', + }); + t.fail('should have found vuln'); + } catch (err) { + const msg = err.message; + t.match(msg, "Image layer: 'test installCommand'"); + } + }, + + '`test foo:latest --docker with auto detected instructions`': + (params) => async (t) => { + stubDockerPluginResponse( + params.ecoSystemPlugins, + { + scanResults: [ + { + facts: [ + { type: 'depGraph', data: {} }, + { + type: 'autoDetectedUserInstructions', + data: { + dockerfilePackages: { + bzip2: { + installCommand: 'test installCommand', + }, }, }, }, + ], + identity: { + type: 'deb', + }, + target: { + image: 'docker-image|ubuntu', }, - ], - identity: { - type: 'deb', - }, - target: { - image: 'docker-image|ubuntu', }, - }, - ], - }, - t, - ); + ], + }, + t, + ); - const vulns = require(getFixturePath( - 'docker/find-result-remediation.json', - )); - params.server.setNextResponse(vulns); + const vulns = require( + getFixturePath('docker/find-result-remediation.json'), + ); + params.server.setNextResponse(vulns); - try { - await params.cli.test('foo:latest', { - docker: true, - org: 'explicit-org', - }); - t.fail('should have found vuln'); - } catch (err) { - const msg = err.message; - t.match(msg, "Image layer: 'test installCommand'"); - } - }, + try { + await params.cli.test('foo:latest', { + docker: true, + org: 'explicit-org', + }); + t.fail('should have found vuln'); + } catch (err) { + const msg = err.message; + t.match(msg, "Image layer: 'test installCommand'"); + } + }, '`container test alpine --sarif `': (params, utils) => async (t) => { const testableObject = await testSarif(t, utils, params, { sarif: true, }); const results = JSON.parse(testableObject.message); - const sarifResults = require(getFixturePath( - 'docker/sarif-container-result.json', - )); - t.same(results, sarifResults, 'stdout containing sarif results'); - t.end(); - }, - - 
'`container test alpine --file=Dockerfile --sarif `': ( - params, - utils, - ) => async (t) => { - const testableObject = await testSarif(t, utils, params, { - sarif: true, - file: 'Dockerfile', - }); - const results = JSON.parse(testableObject.message); - const sarifResults = require(getFixturePath( - 'docker/sarif-with-file-container-result.json', - )); + const sarifResults = require( + getFixturePath('docker/sarif-container-result.json'), + ); t.same(results, sarifResults, 'stdout containing sarif results'); t.end(); }, - '`test --docker --file=Dockerfile --sarif --sarif-output-file`': ( - params, - utils, - ) => async (t) => { - const testableObject = await testSarif(t, utils, params, { - sarif: true, - 'sarif-output-file': 'sarif-test-file.json', - }); - const results = JSON.parse(testableObject.message); - const sarifStringifiedResults = JSON.parse( - testableObject.sarifStringifiedResults, - ); - t.same( - results, - sarifStringifiedResults, - 'stdout and stringified sarif results are the same', - ); - t.end(); - }, + '`container test alpine --file=Dockerfile --sarif `': + (params, utils) => async (t) => { + const testableObject = await testSarif(t, utils, params, { + sarif: true, + file: 'Dockerfile', + }); + const results = JSON.parse(testableObject.message); + const sarifResults = require( + getFixturePath('docker/sarif-with-file-container-result.json'), + ); + t.same(results, sarifResults, 'stdout containing sarif results'); + t.end(); + }, + + '`test --docker --file=Dockerfile --sarif --sarif-output-file`': + (params, utils) => async (t) => { + const testableObject = await testSarif(t, utils, params, { + sarif: true, + 'sarif-output-file': 'sarif-test-file.json', + }); + const results = JSON.parse(testableObject.message); + const sarifStringifiedResults = JSON.parse( + testableObject.sarifStringifiedResults, + ); + t.same( + results, + sarifStringifiedResults, + 'stdout and stringified sarif results are the same', + ); + t.end(); + }, '`test --docker 
doesnotexist`': (params) => async (t) => { try { diff --git a/test/tap/cli-test/cli-test.elixir.spec.ts b/test/tap/cli-test/cli-test.elixir.spec.ts index 2c0ba7323c..72c3537271 100644 --- a/test/tap/cli-test/cli-test.elixir.spec.ts +++ b/test/tap/cli-test/cli-test.elixir.spec.ts @@ -5,116 +5,114 @@ import * as depGraphLib from '@snyk/dep-graph'; export const ElixirTests: AcceptanceTests = { language: 'Elixir', tests: { - '`test elixir --file=mix.exs`': (params, utils, snykHttpClient) => async ( - t, - ) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - scannedProjects: await getScannedProjects(), - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'mix.exs', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('hex').returns(plugin); - - await params.cli.test('elixir-hex', { - file: 'mix.exs', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'hex'); - t.equal(req.body.targetFile, 'mix.exs', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'elixir-hex', - 'mix.exs', - { - args: null, - file: 'mix.exs', - org: null, - projectName: null, - packageManager: 'hex', - path: 'elixir-hex', - showVulnPaths: 'some', + '`test elixir --file=mix.exs`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + scannedProjects: await getScannedProjects(), + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'mix.exs', + }, + }; }, - snykHttpClient, - ], - 'calls golang plugin', - ); - }, + }; + const spyPlugin = 
sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('hex').returns(plugin); - '`test elixir-hex` auto-detects hex': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - scannedProjects: await getScannedProjects(), - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'mix.exs', + await params.cli.test('elixir-hex', { + file: 'mix.exs', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'hex'); + t.equal(req.body.targetFile, 'mix.exs', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'elixir-hex', + 'mix.exs', + { + args: null, + file: 'mix.exs', + org: null, + projectName: null, + packageManager: 'hex', + path: 'elixir-hex', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + snykHttpClient, + ], + 'calls golang plugin', + ); + }, - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('hex').returns(plugin); + '`test elixir-hex` auto-detects hex': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + scannedProjects: await getScannedProjects(), + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'mix.exs', + }, + }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - await params.cli.test('elixir-hex'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('hex').returns(plugin); - const 
req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'hex'); - t.equal(req.body.targetFile, 'mix.exs', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'elixir-hex', - 'mix.exs', - { - args: null, - file: 'mix.exs', - org: null, - projectName: null, - packageManager: 'hex', - path: 'elixir-hex', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls elixir-hex plugin', - ); - }, + await params.cli.test('elixir-hex'); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'hex'); + t.equal(req.body.targetFile, 'mix.exs', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'elixir-hex', + 'mix.exs', + { + args: null, + file: 'mix.exs', + org: null, + projectName: null, + packageManager: 'hex', + path: 'elixir-hex', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls elixir-hex plugin', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.generic.spec.ts b/test/tap/cli-test/cli-test.generic.spec.ts index 46dfec3a54..cb96d8d448 100644 --- a/test/tap/cli-test/cli-test.generic.spec.ts +++ b/test/tap/cli-test/cli-test.generic.spec.ts @@ -56,64 +56,58 @@ export const GenericTests: AcceptanceTests = { } }, - 'userMessage and error code correctly bubbles with npm': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('npm-package', { org: 'missing-org' }); - t.fail('expect to err'); - } catch (err) { - t.equal( - err.userMessage, - 'Org missing-org was not found 
or you may not have the correct permissions', - 'got correct err message', - ); - t.equal(err.code, 404); - } - t.end(); - }, + 'userMessage and error code correctly bubbles with npm': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('npm-package', { org: 'missing-org' }); + t.fail('expect to err'); + } catch (err) { + t.equal( + err.userMessage, + 'Org missing-org was not found or you may not have the correct permissions', + 'got correct err message', + ); + t.equal(err.code, 404); + } + t.end(); + }, - 'userMessage and error code correctly bubbles with npm and json output': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('npm-package', { - org: 'missing-org', - json: true, - }); - t.fail('expect to err'); - } catch (err) { - t.match( - err.jsonStringifiedResults, - 'Org missing-org was not found or you may not have the correct permissions', - 'got correct err message', - ); - t.equal(err.code, 404); - } - t.end(); - }, + 'userMessage and error code correctly bubbles with npm and json output': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('npm-package', { + org: 'missing-org', + json: true, + }); + t.fail('expect to err'); + } catch (err) { + t.match( + err.jsonStringifiedResults, + 'Org missing-org was not found or you may not have the correct permissions', + 'got correct err message', + ); + t.equal(err.code, 404); + } + t.end(); + }, - 'userMessage correctly bubbles with everything other than npm': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('ruby-app', { org: 'missing-org' }); - t.fail('expect to err'); - } catch (err) { - t.equal( - err.userMessage, - 'Org missing-org was not found or you may not have the correct permissions', - 'got correct err message', - ); - } - t.end(); - }, + 'userMessage correctly bubbles with everything other than npm': + (params, 
utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('ruby-app', { org: 'missing-org' }); + t.fail('expect to err'); + } catch (err) { + t.equal( + err.userMessage, + 'Org missing-org was not found or you may not have the correct permissions', + 'got correct err message', + ); + } + t.end(); + }, /** * Remote package `test` @@ -143,46 +137,43 @@ export const GenericTests: AcceptanceTests = { t.notMatch(output, 'snyk wizard', 'does not suggest `snyk wizard`'); }, - '`test sinatra --registry=rubygems` sends remote Rubygems request:': ( - params, - ) => async (t) => { - await params.cli.test('sinatra', { registry: 'rubygems', org: 'ACME' }); - const req = params.server.popRequest(); - t.equal(req.method, 'GET', 'makes GET request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/vuln/rubygems/sinatra', 'gets from correct url'); - t.equal(req.query.org, 'ACME', 'org sent as a query in request'); - }, + '`test sinatra --registry=rubygems` sends remote Rubygems request:': + (params) => async (t) => { + await params.cli.test('sinatra', { registry: 'rubygems', org: 'ACME' }); + const req = params.server.popRequest(); + t.equal(req.method, 'GET', 'makes GET request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/vuln/rubygems/sinatra', 'gets from correct url'); + t.equal(req.query.org, 'ACME', 'org sent as a query in request'); + }, /** * Local source `test` */ - '`test /` test for non-existent with path specified': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('/'); - t.fail('should have failed'); - } catch (err) { - t.pass('throws err'); - t.match( - err.message, - 'Could not detect supported target files in /.' 
+ - '\nPlease see our documentation for supported' + - ' languages and target files: ' + - 'https://snyk.co/udVgQ' + - ' and make sure you' + - ' are in the right directory.', - ); - } - }, + '`test /` test for non-existent with path specified': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('/'); + t.fail('should have failed'); + } catch (err) { + t.pass('throws err'); + t.match( + err.message, + 'Could not detect supported target files in /.' + + '\nPlease see our documentation for supported' + + ' languages and target files: ' + + 'https://snyk.co/udVgQ' + + ' and make sure you' + + ' are in the right directory.', + ); + } + }, '`test empty --file=readme.md`': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -294,26 +285,24 @@ export const GenericTests: AcceptanceTests = { }); }, - '`test npm-package-with-git-url ` handles git url with patch policy': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces('npm-package-with-git-url'); - const vulns = readJSON( - getFixturePath('npm-package-with-git-url/test-graph-result.json'), - ); - params.server.setNextResponse(vulns); - try { - await params.cli.test(); - t.fail('should fail'); - } catch (res) { - params.server.popRequest(); + '`test npm-package-with-git-url ` handles git url with patch policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces('npm-package-with-git-url'); + const vulns = readJSON( + getFixturePath('npm-package-with-git-url/test-graph-result.json'), + ); + params.server.setNextResponse(vulns); + try { + await params.cli.test(); + t.fail('should fail'); + } catch (res) { + params.server.popRequest(); - t.match(res.message, 'for known vulnerabilities', 'found results'); + t.match(res.message, 'for known vulnerabilities', 'found results'); - t.match(res.message, 'Local Snyk policy: found', 'found policy file'); - } - }, + t.match(res.message, 'Local Snyk policy: found', 'found policy file'); + } + }, '`test 
--insecure`': (params, utils) => async (t) => { t.plan(2); diff --git a/test/tap/cli-test/cli-test.go.spec.ts b/test/tap/cli-test/cli-test.go.spec.ts index 58997114e6..7c4209711a 100644 --- a/test/tap/cli-test/cli-test.go.spec.ts +++ b/test/tap/cli-test/cli-test.go.spec.ts @@ -4,335 +4,323 @@ import { AcceptanceTests } from '../cli-test.acceptance.test'; export const GoTests: AcceptanceTests = { language: 'Go', tests: { - '`test golang-gomodules --file=go.mod`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'go.mod', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gomodules').returns(plugin); - - await params.cli.test('golang-gomodules', { - file: 'go.mod', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'gomodules'); - t.equal(req.body.targetFile, 'go.mod', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'golang-gomodules', - 'go.mod', - { - args: null, - file: 'go.mod', - org: null, - projectName: null, - packageManager: 'gomodules', - path: 'golang-gomodules', - showVulnPaths: 'some', + '`test golang-gomodules --file=go.mod`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'go.mod', + }, + }; }, - snykHttpClient, - ], - 'calls golang plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 
'inspect'); - '`test golang-app` auto-detects golang-gomodules': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'go.mod', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gomodules').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gomodules').returns(plugin); + await params.cli.test('golang-gomodules', { + file: 'go.mod', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'gomodules'); + t.equal(req.body.targetFile, 'go.mod', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'golang-gomodules', + 'go.mod', + { + args: null, + file: 'go.mod', + org: null, + projectName: null, + packageManager: 'gomodules', + path: 'golang-gomodules', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golang plugin', + ); + }, - await params.cli.test('golang-gomodules'); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'gomodules'); - t.equal(req.body.targetFile, 'go.mod', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'golang-gomodules', - 'go.mod', - { - args: null, - file: 'go.mod', - org: null, - 
projectName: null, - packageManager: 'gomodules', - path: 'golang-gomodules', - showVulnPaths: 'some', + '`test golang-app` auto-detects golang-gomodules': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'go.mod', + }, + }; }, - snykHttpClient, - ], - 'calls golang-gomodules plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test golang-app --file=Gopkg.lock`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'Gopkg.lock', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gomodules').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('golangdep').returns(plugin); + await params.cli.test('golang-gomodules'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'gomodules'); + t.equal(req.body.targetFile, 'go.mod', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'golang-gomodules', + 'go.mod', + { + args: null, + file: 'go.mod', + org: null, + projectName: null, + packageManager: 'gomodules', + path: 'golang-gomodules', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golang-gomodules plugin', + ); + }, - await params.cli.test('golang-app', { - file: 'Gopkg.lock', - }); - 
const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'golangdep'); - t.equal(req.body.targetFile, 'Gopkg.lock', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'golang-app', - 'Gopkg.lock', - { - args: null, - file: 'Gopkg.lock', - org: null, - projectName: null, - packageManager: 'golangdep', - path: 'golang-app', - showVulnPaths: 'some', + '`test golang-app --file=Gopkg.lock`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'Gopkg.lock', + }, + }; }, - snykHttpClient, - ], - 'calls golang plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test golang-app --file=vendor/vendor.json`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'vendor/vendor.json', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('golangdep').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('govendor').returns(plugin); + await params.cli.test('golang-app', { + file: 'Gopkg.lock', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct 
url'); + t.equal(req.body.depGraph.pkgManager.name, 'golangdep'); + t.equal(req.body.targetFile, 'Gopkg.lock', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'golang-app', + 'Gopkg.lock', + { + args: null, + file: 'Gopkg.lock', + org: null, + projectName: null, + packageManager: 'golangdep', + path: 'golang-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golang plugin', + ); + }, - await params.cli.test('golang-app', { - file: 'vendor/vendor.json', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'govendor'); - t.equal(req.body.targetFile, 'vendor/vendor.json', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'golang-app', - 'vendor/vendor.json', - { - args: null, - file: 'vendor/vendor.json', - org: null, - projectName: null, - packageManager: 'govendor', - path: 'golang-app', - showVulnPaths: 'some', + '`test golang-app --file=vendor/vendor.json`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'vendor/vendor.json', + }, + }; }, - snykHttpClient, - ], - 'calls golang plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test golang-app` auto-detects golang/dep': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'Gopkg.lock', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + 
t.teardown(loadPlugin.restore); + loadPlugin.withArgs('govendor').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('golangdep').returns(plugin); + await params.cli.test('golang-app', { + file: 'vendor/vendor.json', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'govendor'); + t.equal(req.body.targetFile, 'vendor/vendor.json', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'golang-app', + 'vendor/vendor.json', + { + args: null, + file: 'vendor/vendor.json', + org: null, + projectName: null, + packageManager: 'govendor', + path: 'golang-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golang plugin', + ); + }, - await params.cli.test('golang-app'); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'golangdep'); - t.equal(req.body.targetFile, 'Gopkg.lock', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'golang-app', - 'Gopkg.lock', - { - args: null, - file: 'Gopkg.lock', - org: null, - projectName: null, - packageManager: 'golangdep', - path: 'golang-app', - showVulnPaths: 'some', + '`test golang-app` auto-detects golang/dep': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'Gopkg.lock', + }, + }; }, - snykHttpClient, - ], 
- 'calls golang plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test golang-app-govendor` auto-detects govendor': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('golangdep').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('govendor').returns(plugin); + await params.cli.test('golang-app'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'golangdep'); + t.equal(req.body.targetFile, 'Gopkg.lock', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'golang-app', + 'Gopkg.lock', + { + args: null, + file: 'Gopkg.lock', + org: null, + projectName: null, + packageManager: 'golangdep', + path: 'golang-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golang plugin', + ); + }, - await params.cli.test('golang-app-govendor'); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'govendor'); - t.same( - spyPlugin.getCall(0).args, - [ - 'golang-app-govendor', - 'vendor/vendor.json', - { - args: null, - file: 'vendor/vendor.json', - org: null, - projectName: null, 
- packageManager: 'govendor', - path: 'golang-app-govendor', - showVulnPaths: 'some', + '`test golang-app-govendor` auto-detects govendor': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; }, - snykHttpClient, - ], - 'calls golang plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('govendor').returns(plugin); + + await params.cli.test('golang-app-govendor'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'govendor'); + t.same( + spyPlugin.getCall(0).args, + [ + 'golang-app-govendor', + 'vendor/vendor.json', + { + args: null, + file: 'vendor/vendor.json', + org: null, + projectName: null, + packageManager: 'govendor', + path: 'golang-app-govendor', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls golang plugin', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.gradle.spec.ts b/test/tap/cli-test/cli-test.gradle.spec.ts index 258748eba5..4848900ed1 100644 --- a/test/tap/cli-test/cli-test.gradle.spec.ts +++ b/test/tap/cli-test/cli-test.gradle.spec.ts @@ -6,52 +6,50 @@ import { CommandResult } from '../../../src/cli/commands/types'; export const GradleTests: AcceptanceTests = { language: 'Gradle', tests: { - '`test gradle-kotlin-dsl-app` returns correct meta': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - 
sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); + '`test gradle-kotlin-dsl-app` returns correct meta': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; + }, + }; + sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); - const commandResult: CommandResult = await params.cli.test( - 'gradle-kotlin-dsl-app', - ); - const res: string = commandResult.getDisplayResults(); - const meta = res.slice(res.indexOf('Organization:')).split('\n'); - t.match(meta[0], /Organization:\s+test-org/, 'organization displayed'); - t.match( - meta[1], - /Package manager:\s+gradle/, - 'package manager displayed', - ); - t.match( - meta[2], - /Target file:\s+build.gradle.kts/, - 'target file displayed', - ); - t.match(meta[3], /Open source:\s+no/, 'open source displayed'); - t.match( - meta[4], - /Project path:\s+gradle-kotlin-dsl-app/, - 'path displayed', - ); - t.notMatch( - meta[5], - /Local Snyk policy:\s+found/, - 'local policy not displayed', - ); - }, + const commandResult: CommandResult = await params.cli.test( + 'gradle-kotlin-dsl-app', + ); + const res: string = commandResult.getDisplayResults(); + const meta = res.slice(res.indexOf('Organization:')).split('\n'); + t.match(meta[0], /Organization:\s+test-org/, 'organization displayed'); + t.match( + meta[1], + /Package manager:\s+gradle/, + 'package manager displayed', + ); + t.match( + meta[2], + /Target file:\s+build.gradle.kts/, + 'target file displayed', + ); + t.match(meta[3], /Open source:\s+no/, 'open source displayed'); + t.match( + meta[4], + /Project path:\s+gradle-kotlin-dsl-app/, + 'path displayed', + ); + t.notMatch( + meta[5], + 
/Local Snyk policy:\s+found/, + 'local policy not displayed', + ); + }, '`test gradle-app` returns correct meta': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -92,187 +90,188 @@ export const GradleTests: AcceptanceTests = { ); }, - '`test gradle-app --all-sub-projects` sends `allSubProjects` argument to plugin': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { plugin: { name: 'gradle' }, package: {} }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); + '`test gradle-app --all-sub-projects` sends `allSubProjects` argument to plugin': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { plugin: { name: 'gradle' }, package: {} }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); - await params.cli.test('gradle-app', { - allSubProjects: true, - }); - t.ok(((spyPlugin.args[0] as any)[2] as any).allSubProjects); - }, - '`test gradle-app --all-sub-projects` with policy': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { plugin: { name: 'gradle' }, package: {} }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); - - await params.cli.test('gradle-app', { - allSubProjects: true, - }); - t.ok(((spyPlugin.args[0] as any)[2] as any).allSubProjects); + await params.cli.test('gradle-app', { + allSubProjects: true, + }); + t.ok(((spyPlugin.args[0] as any)[2] as any).allSubProjects); + }, + '`test gradle-app 
--all-sub-projects` with policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { plugin: { name: 'gradle' }, package: {} }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); - let policyCount = 0; - params.server - .getRequests() - .filter((r) => r.url === '/api/v1/test-dep-graph?org=') - .forEach((req) => { - if ( - req.body.displayTargetFile.endsWith('gradle-multi-project/subproj') - ) { - // TODO: this should return 1 policy when fixed - // uncomment then - // t.match( - // req.body.policy, - // 'SNYK-JAVA-ORGBOUNCYCASTLE-32364', - // 'policy is found & sent', - // ); - t.ok( - req.body.policy, - undefined, - 'policy is not found even though it should be', - ); - policyCount += 1; - } - t.match(req.url, '/test-dep-graph', 'posts to correct url'); + await params.cli.test('gradle-app', { + allSubProjects: true, }); - // TODO: this should return 1 policy when fixed - t.equal(policyCount, 0, 'one sub-project policy found & sent'); - }, + t.ok(((spyPlugin.args[0] as any)[2] as any).allSubProjects); - '`test gradle-app` plugin fails to return package or scannedProjects': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { plugin: { name: 'gradle' } }; - }, - }; - sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); + let policyCount = 0; + params.server + .getRequests() + .filter((r) => r.url === '/api/v1/test-dep-graph?org=') + .forEach((req) => { + if ( + req.body.displayTargetFile.endsWith( + 'gradle-multi-project/subproj', + ) + ) { + // TODO: this should return 1 policy when fixed + // uncomment then + // t.match( + // req.body.policy, + // 
'SNYK-JAVA-ORGBOUNCYCASTLE-32364', + // 'policy is found & sent', + // ); + t.ok( + req.body.policy, + undefined, + 'policy is not found even though it should be', + ); + policyCount += 1; + } + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + }); + // TODO: this should return 1 policy when fixed + t.equal(policyCount, 0, 'one sub-project policy found & sent'); + }, - try { - await params.cli.test('gradle-app', {}); - t.fail('expected error'); - } catch (error) { - t.match( - error, - /error getting dependencies from gradle plugin: neither 'package' nor 'scannedProjects' were found/, - 'error found', - ); - } - }, + '`test gradle-app` plugin fails to return package or scannedProjects': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { plugin: { name: 'gradle' } }; + }, + }; + sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); - '`test gradle-app --all-sub-projects` returns correct multi tree meta': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect(): Promise { - return { - plugin: { - meta: { - allSubProjectNames: ['a', 'b'], - }, - name: 'gradle', - }, - scannedProjects: [ - { - depTree: { - name: 'tree0', - version: '1.0.0', - dependencies: { dep1: { name: 'dep1', version: '1' } }, + try { + await params.cli.test('gradle-app', {}); + t.fail('expected error'); + } catch (error) { + t.match( + error, + /error getting dependencies from gradle plugin: neither 'package' nor 'scannedProjects' were found/, + 'error found', + ); + } + }, + + '`test gradle-app --all-sub-projects` returns correct multi tree meta': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect(): Promise { + return { + plugin: { + meta: { + allSubProjectNames: ['a', 'b'], }, + name: 'gradle', }, - { - 
depTree: { - name: 'tree1', - version: '2.0.0', - dependencies: { dep1: { name: 'dep2', version: '2' } }, + scannedProjects: [ + { + depTree: { + name: 'tree0', + version: '1.0.0', + dependencies: { dep1: { name: 'dep1', version: '1' } }, + }, }, - }, - ], - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('gradle').returns(plugin); - - const commandResult: CommandResult = await params.cli.test('gradle-app', { - allSubProjects: true, - }); - const res = commandResult.getDisplayResults(); - t.ok( - ((spyPlugin.args[0] as any)[2] as any).allSubProjects, - '`allSubProjects` option is sent', - ); + { + depTree: { + name: 'tree1', + version: '2.0.0', + dependencies: { dep1: { name: 'dep2', version: '2' } }, + }, + }, + ], + }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('gradle').returns(plugin); - const tests = res - .split('Testing gradle-app...') - .filter((s) => !!s.trim()); - t.equal(tests.length, 2, 'two projects tested independently'); - t.match( - res, - /Tested 2 projects/, - 'number projects tested displayed properly', - ); - t.notMatch( - res, - /use --all-sub-projects flag to scan all sub-projects/, - 'all-sub-projects flag is NOT suggested as we already scanned with it', - ); - for (let i = 0; i < tests.length; i++) { - const meta = tests[i] - .slice(tests[i].indexOf('Organization:')) - .split('\n'); - t.match(meta[0], /Organization:\s+test-org/, 'organization displayed'); - t.match( - meta[1], - /Package manager:\s+gradle/, - 'package manager displayed', + const commandResult: CommandResult = await params.cli.test( + 'gradle-app', + { + allSubProjects: true, + }, ); + const res = commandResult.getDisplayResults(); + t.ok( + ((spyPlugin.args[0] as any)[2] as any).allSubProjects, + 
'`allSubProjects` option is sent', + ); + + const tests = res + .split('Testing gradle-app...') + .filter((s) => !!s.trim()); + t.equal(tests.length, 2, 'two projects tested independently'); t.match( - meta[2], - /Target file:\s+build.gradle/, - 'target file displayed', + res, + /Tested 2 projects/, + 'number projects tested displayed properly', ); - t.match(meta[3], /Project name:\s+tree/, 'sub-project displayed'); - t.match(meta[3], `tree${i}`, 'sub-project displayed'); - t.match(meta[4], /Open source:\s+no/, 'open source displayed'); - t.match(meta[5], /Project path:\s+gradle-app/, 'path displayed'); t.notMatch( - meta[6], - /Local Snyk policy:\s+found/, - 'local policy not displayed', + res, + /use --all-sub-projects flag to scan all sub-projects/, + 'all-sub-projects flag is NOT suggested as we already scanned with it', ); - } - }, + for (let i = 0; i < tests.length; i++) { + const meta = tests[i] + .slice(tests[i].indexOf('Organization:')) + .split('\n'); + t.match( + meta[0], + /Organization:\s+test-org/, + 'organization displayed', + ); + t.match( + meta[1], + /Package manager:\s+gradle/, + 'package manager displayed', + ); + t.match( + meta[2], + /Target file:\s+build.gradle/, + 'target file displayed', + ); + t.match(meta[3], /Project name:\s+tree/, 'sub-project displayed'); + t.match(meta[3], `tree${i}`, 'sub-project displayed'); + t.match(meta[4], /Open source:\s+no/, 'open source displayed'); + t.match(meta[5], /Project path:\s+gradle-app/, 'path displayed'); + t.notMatch( + meta[6], + /Local Snyk policy:\s+found/, + 'local policy not displayed', + ); + } + }, }, }; diff --git a/test/tap/cli-test/cli-test.maven.spec.ts b/test/tap/cli-test/cli-test.maven.spec.ts index d44ac34a49..8e346ebd0d 100644 --- a/test/tap/cli-test/cli-test.maven.spec.ts +++ b/test/tap/cli-test/cli-test.maven.spec.ts @@ -23,203 +23,195 @@ function stubExec(t, execOutputFile) { export const MavenTests: AcceptanceTests = { language: 'Maven', tests: { - '`test maven-app 
--file=pom.xml --dev` sends package info': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - stubExec(t, 'maven-app/mvn-dep-tree-stdout.txt'); - await params.cli.test('maven-app', { - file: 'pom.xml', - org: 'nobelprize.org', - dev: true, - }); + '`test maven-app --file=pom.xml --dev` sends package info': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + stubExec(t, 'maven-app/mvn-dep-tree-stdout.txt'); + await params.cli.test('maven-app', { + file: 'pom.xml', + org: 'nobelprize.org', + dev: true, + }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal( - req.query.org, - 'nobelprize.org', - 'org sent as a query in request', - ); - t.match(req.body.targetFile, undefined, 'target is undefined'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal( + req.query.org, + 'nobelprize.org', + 'org sent as a query in request', + ); + t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = depGraphLib.createFromJSON(req.body.depGraph); - t.equal( - depGraph.rootPkg.name, - 'com.mycompany.app:maven-app', - 'root name', - ); - const pkgs = depGraph.getPkgs().map((x) => `${x.name}@${x.version}`); - t.ok(pkgs.indexOf('com.mycompany.app:maven-app@1.0-SNAPSHOT') >= 0); - t.ok(pkgs.indexOf('axis:axis@1.4') >= 0); - t.ok(pkgs.indexOf('junit:junit@3.8.2') >= 0); - }, + const depGraph = depGraphLib.createFromJSON(req.body.depGraph); + t.equal( + depGraph.rootPkg.name, + 'com.mycompany.app:maven-app', + 'root name', + ); + const pkgs = depGraph.getPkgs().map((x) => `${x.name}@${x.version}`); 
+ t.ok(pkgs.indexOf('com.mycompany.app:maven-app@1.0-SNAPSHOT') >= 0); + t.ok(pkgs.indexOf('axis:axis@1.4') >= 0); + t.ok(pkgs.indexOf('junit:junit@3.8.2') >= 0); + }, - '`test maven-app-with-jars --file=example.jar` sends package info': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('maven').returns(plugin); + '`test maven-app-with-jars --file=example.jar` sends package info': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('maven').returns(plugin); - await params.cli.test('maven-app-with-jars', { - file: 'example.jar', - }); + await params.cli.test('maven-app-with-jars', { + file: 'example.jar', + }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); - t.equal(req.body.depGraph.pkgManager.name, 
'maven'); - t.same( - spyPlugin.getCall(0).args, - [ - 'maven-app-with-jars', - 'example.jar', - { - args: null, - file: 'example.jar', - org: null, - projectName: null, - packageManager: 'maven', - path: 'maven-app-with-jars', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls mvn plugin', - ); - }, + t.equal(req.body.depGraph.pkgManager.name, 'maven'); + t.same( + spyPlugin.getCall(0).args, + [ + 'maven-app-with-jars', + 'example.jar', + { + args: null, + file: 'example.jar', + org: null, + projectName: null, + packageManager: 'maven', + path: 'maven-app-with-jars', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls mvn plugin', + ); + }, - '`test maven-app-with-jars --file=example.war` sends package info': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('maven').returns(plugin); + '`test maven-app-with-jars --file=example.war` sends package info': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('maven').returns(plugin); - await params.cli.test('maven-app-with-jars', { - file: 'example.war', - }); + await params.cli.test('maven-app-with-jars', { + file: 'example.war', + }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 
'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); - t.equal(req.body.depGraph.pkgManager.name, 'maven'); - t.same( - spyPlugin.getCall(0).args, - [ - 'maven-app-with-jars', - 'example.war', - { - args: null, - file: 'example.war', - org: null, - projectName: null, - packageManager: 'maven', - path: 'maven-app-with-jars', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls mvn plugin', - ); - }, + t.equal(req.body.depGraph.pkgManager.name, 'maven'); + t.same( + spyPlugin.getCall(0).args, + [ + 'maven-app-with-jars', + 'example.war', + { + args: null, + file: 'example.war', + org: null, + projectName: null, + packageManager: 'maven', + path: 'maven-app-with-jars', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls mvn plugin', + ); + }, - '`test maven-app-with-jars --scan-all-unmanaged` sends package info': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('maven').returns(plugin); - await params.cli.test('maven-app-with-jars', { - scanAllUnmanaged: true, - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); 
- t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - t.equal(req.body.depGraph.pkgManager.name, 'maven'); - t.same( - spyPlugin.getCall(0).args, - [ - 'maven-app-with-jars', - undefined, // no specified target file - { - args: null, - // file: undefined, no file - org: null, - projectName: null, - packageManager: 'maven', - path: 'maven-app-with-jars', - showVulnPaths: 'some', - scanAllUnmanaged: true, + '`test maven-app-with-jars --scan-all-unmanaged` sends package info': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; }, - snykHttpClient, - ], - 'calls mvn plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('maven').returns(plugin); + await params.cli.test('maven-app-with-jars', { + scanAllUnmanaged: true, + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); + t.equal(req.body.depGraph.pkgManager.name, 'maven'); + t.same( + spyPlugin.getCall(0).args, + [ + 'maven-app-with-jars', + undefined, // no specified target file + { + args: null, + // file: undefined, no file + org: null, + projectName: null, + packageManager: 'maven', + path: 'maven-app-with-jars', + showVulnPaths: 'some', + maxVulnPaths: undefined, + scanAllUnmanaged: true, + }, + snykHttpClient, + ], + 'calls mvn plugin', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.npm.spec.ts b/test/tap/cli-test/cli-test.npm.spec.ts index 6f62bf5919..6cbcd81f72 
100644 --- a/test/tap/cli-test/cli-test.npm.spec.ts +++ b/test/tap/cli-test/cli-test.npm.spec.ts @@ -5,21 +5,20 @@ import { getFixturePath } from '../../jest/util/getFixturePath'; export const NpmTests: AcceptanceTests = { language: 'NPM', tests: { - '`test npm-package with custom --project-name`': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package', { - 'project-name': 'custom-project-name', - }); - const req = params.server.popRequest(); - t.match( - req.body.projectNameOverride, - 'custom-project-name', - 'custom project name is passed', - ); - t.match(req.body.targetFile, undefined, 'target is undefined'); - }, + '`test npm-package with custom --project-name`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-package', { + 'project-name': 'custom-project-name', + }); + const req = params.server.popRequest(); + t.match( + req.body.projectNameOverride, + 'custom-project-name', + 'custom project name is passed', + ); + t.match(req.body.targetFile, undefined, 'target is undefined'); + }, '`test npm-package with lockfile v2`': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -45,18 +44,18 @@ export const NpmTests: AcceptanceTests = { ); }, - '`test npm-package with lockfile v3 bundled deps`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const res = await params.cli.test('npm-package-lockfile-v3-bundled-deps'); - t.match( - res, - /Tested 570 dependencies for known vulnerabilities/, - 'should succeed scanning npm lock v3 with bundled deps', - ); - }, + '`test npm-package with lockfile v3 bundled deps`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const res = await params.cli.test( + 'npm-package-lockfile-v3-bundled-deps', + ); + t.match( + res, + /Tested 570 dependencies for known vulnerabilities/, + 'should succeed scanning npm lock v3 with bundled deps', + ); + }, 'test npm-package remoteUrl': (params, utils) 
=> async (t) => { utils.chdirWorkspaces(); @@ -77,29 +76,27 @@ export const NpmTests: AcceptanceTests = { delete process.env.GIT_DIR; }, - 'test npm-package remoteUrl with --remote-repo-url': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - process.env.GIT_DIR = 'npm-package/gitdir'; - await params.cli.test('npm-package', { - 'remote-repo-url': 'foo', - }); - const req = params.server.popRequest(); - t.equal( - req.body.target.remoteUrl, - 'foo', - 'specified remoteUrl is passed', - ); - t.equal( - req.body.target.branch, - 'master', - 'correct branch passed to request', - ); + 'test npm-package remoteUrl with --remote-repo-url': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + process.env.GIT_DIR = 'npm-package/gitdir'; + await params.cli.test('npm-package', { + 'remote-repo-url': 'foo', + }); + const req = params.server.popRequest(); + t.equal( + req.body.target.remoteUrl, + 'foo', + 'specified remoteUrl is passed', + ); + t.equal( + req.body.target.branch, + 'master', + 'correct branch passed to request', + ); - delete process.env.GIT_DIR; - }, + delete process.env.GIT_DIR; + }, '`test --file=protect/package.json`': (params) => async (t) => { const res = await params.cli.test(getFixturePath('protect'), { @@ -112,27 +109,37 @@ export const NpmTests: AcceptanceTests = { ); }, - '`test npm-package-policy` returns correct meta': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - const commandResult: CommandResult = await params.cli.test( - 'npm-package-policy', - ); - const res = commandResult.getDisplayResults(); - const meta = res.slice(res.indexOf('Organization:')).split('\n'); - t.match(meta[0], /Organization:\s+test-org/, 'organization displayed'); - t.match(meta[1], /Package manager:\s+npm/, 'package manager displayed'); - t.match(meta[2], /Target file:\s+package.json/, 'target file displayed'); - t.match( - meta[3], - /Project name:\s+custom-policy-location-package/, - 'project name displayed', - ); - 
t.match(meta[4], /Open source:\s+no/, 'open source displayed'); - t.match(meta[5], /Project path:\s+npm-package-policy/, 'path displayed'); - t.match(meta[6], /Local Snyk policy:\s+found/, 'local policy displayed'); - }, + '`test npm-package-policy` returns correct meta': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const commandResult: CommandResult = + await params.cli.test('npm-package-policy'); + const res = commandResult.getDisplayResults(); + const meta = res.slice(res.indexOf('Organization:')).split('\n'); + t.match(meta[0], /Organization:\s+test-org/, 'organization displayed'); + t.match(meta[1], /Package manager:\s+npm/, 'package manager displayed'); + t.match( + meta[2], + /Target file:\s+package.json/, + 'target file displayed', + ); + t.match( + meta[3], + /Project name:\s+custom-policy-location-package/, + 'project name displayed', + ); + t.match(meta[4], /Open source:\s+no/, 'open source displayed'); + t.match( + meta[5], + /Project path:\s+npm-package-policy/, + 'path displayed', + ); + t.match( + meta[6], + /Local Snyk policy:\s+found/, + 'local policy displayed', + ); + }, '`test npm-package` sends pkg info': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -149,212 +156,191 @@ export const NpmTests: AcceptanceTests = { ); }, - '`test npm-package --file=package-lock.json ` sends pkg info': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package', { file: 'package-lock.json' }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), - 'depGraph looks fine', - ); - }, + '`test npm-package --file=package-lock.json ` sends pkg info': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await 
params.cli.test('npm-package', { file: 'package-lock.json' }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), + 'depGraph looks fine', + ); + }, - '`test npm-package --file=package-lock.json --dev` sends pkg info': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package', { - file: 'package-lock.json', - dev: true, - }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'npm-package@1.0.0', - 'ms@0.7.1', - 'debug@2.2.0', - 'object-assign@4.1.1', - ].sort(), - 'depGraph looks fine', - ); - }, + '`test npm-package --file=package-lock.json --dev` sends pkg info': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-package', { + file: 'package-lock.json', + dev: true, + }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'npm-package@1.0.0', + 'ms@0.7.1', + 'debug@2.2.0', + 'object-assign@4.1.1', + ].sort(), + 'depGraph looks fine', + ); + }, - '`test npm-out-of-sync` out of sync fails': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('npm-out-of-sync', { dev: true }); - t.fail('Should fail'); - } catch (e) { - t.equal( - e.message, - '\nTesting npm-out-of-sync...\n\n' + - 'Dependency snyk was not found in package-lock.json.' 
+ - ' Your package.json and package-lock.json are probably out of sync.' + - ' Please run "npm install" and try again.', - 'Contains enough info about err', + '`test npm-out-of-sync` out of sync fails': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('npm-out-of-sync', { dev: true }); + t.fail('Should fail'); + } catch (e) { + t.equal( + e.message, + '\nTesting npm-out-of-sync...\n\n' + + 'Dependency snyk was not found in package-lock.json.' + + ' Your package.json and package-lock.json are probably out of sync.' + + ' Please run "npm install" and try again.', + 'Contains enough info about err', + ); + } + }, + + '`test npm-out-of-sync --strict-out-of-sync=false` passes': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-out-of-sync', { + dev: true, + strictOutOfSync: false, + }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'npm-package@1.0.0', + 'object-assign@4.1.1', + 'rewire@^4.0.1', + 'snyk@*', + 'to-array@0.1.4', + ].sort(), + 'depGraph looks fine', ); - } - }, + }, - '`test npm-out-of-sync --strict-out-of-sync=false` passes': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-out-of-sync', { - dev: true, - strictOutOfSync: false, - }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'npm-package@1.0.0', - 'object-assign@4.1.1', - 'rewire@^4.0.1', - 'snyk@*', - 'to-array@0.1.4', - ].sort(), - 'depGraph looks fine', - ); - }, + '`test npm-package-shrinkwrap --file=package-lock.json` with npm-shrinkwrap errors': + (params, utils) => async (t) => { + t.plan(1); + utils.chdirWorkspaces(); + try { + await 
params.cli.test('npm-package-shrinkwrap', { + file: 'package-lock.json', + }); + t.fail('Should fail'); + } catch (e) { + t.match( + e.message, + '--file=package.json', + 'Contains enough info about err', + ); + } + }, - '`test npm-package-shrinkwrap --file=package-lock.json` with npm-shrinkwrap errors': ( - params, - utils, - ) => async (t) => { - t.plan(1); - utils.chdirWorkspaces(); - try { - await params.cli.test('npm-package-shrinkwrap', { + '`test npm-package-with-subfolder --file=package-lock.json ` picks top-level files': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-package-with-subfolder', { file: 'package-lock.json', }); - t.fail('Should fail'); - } catch (e) { - t.match( - e.message, - '--file=package.json', - 'Contains enough info about err', + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['npm-package-top-level@1.0.0', 'to-array@0.1.4'].sort(), + 'depGraph looks fine', ); - } - }, - - '`test npm-package-with-subfolder --file=package-lock.json ` picks top-level files': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package-with-subfolder', { - file: 'package-lock.json', - }); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['npm-package-top-level@1.0.0', 'to-array@0.1.4'].sort(), - 'depGraph looks fine', - ); - }, + }, - '`test npm-package-with-subfolder --file=subfolder/package-lock.json ` picks subfolder files': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package-with-subfolder', { - file: 'subfolder/package-lock.json', - }); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['npm-package-subfolder@1.0.0', 'to-array@0.1.4'].sort(), - 'depGraph looks 
fine', - ); - }, - '`test npm-package-with-overrides` correctly completes test': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package-with-overrides'); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).includes('semver@6.0.0'), - false, - 'override pkg original version not present', - ); - t.same( - depGraph.pkgs.map((p) => p.id).includes('semver@7.5.2'), - true, - 'override pkg is correct version', - ); - }, - '`test npm-lock-v2-with-npm-prefixed-sub-dep-version` correctly completes test': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-lock-v2-with-npm-prefixed-sub-dep-version'); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).includes('string-width-cjs@4.2.3'), - true, - 'npm prefixed subdep has a numbered version', - ); - }, - '`test npm-lock-v2-with-simple-version-range-override` correctly completes test': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-lock-v2-with-simple-version-range-override'); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.notOk( - depGraph.pkgs - .map((p) => p.id) - .find((el: string) => el.startsWith('uuid@8')), - 'no uuid version matching ^8', - ); - t.ok( - depGraph.pkgs - .map((p) => p.id) - .find((el: string) => el.startsWith('uuid@9')), - 'uuid version matching ^9', - ); - }, - '`test npm-package-with-dist-tag-subdependency` correctly completes test': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('npm-package-with-dist-tag-subdependency'); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).includes('cdktf-cli@0.20.3'), - true, - 'npm subdep with dist tag has a numbered version', 
- ); - }, + '`test npm-package-with-subfolder --file=subfolder/package-lock.json ` picks subfolder files': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-package-with-subfolder', { + file: 'subfolder/package-lock.json', + }); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['npm-package-subfolder@1.0.0', 'to-array@0.1.4'].sort(), + 'depGraph looks fine', + ); + }, + '`test npm-package-with-overrides` correctly completes test': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-package-with-overrides'); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).includes('semver@6.0.0'), + false, + 'override pkg original version not present', + ); + t.same( + depGraph.pkgs.map((p) => p.id).includes('semver@7.5.2'), + true, + 'override pkg is correct version', + ); + }, + '`test npm-lock-v2-with-npm-prefixed-sub-dep-version` correctly completes test': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-lock-v2-with-npm-prefixed-sub-dep-version'); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).includes('string-width-cjs@4.2.3'), + true, + 'npm prefixed subdep has a numbered version', + ); + }, + '`test npm-lock-v2-with-simple-version-range-override` correctly completes test': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-lock-v2-with-simple-version-range-override'); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.notOk( + depGraph.pkgs + .map((p) => p.id) + .find((el: string) => el.startsWith('uuid@8')), + 'no uuid version matching ^8', + ); + t.ok( + depGraph.pkgs + .map((p) => p.id) + .find((el: string) => el.startsWith('uuid@9')), + 'uuid 
version matching ^9', + ); + }, + '`test npm-package-with-dist-tag-subdependency` correctly completes test': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('npm-package-with-dist-tag-subdependency'); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).includes('cdktf-cli@0.20.3'), + true, + 'npm subdep with dist tag has a numbered version', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.nuget.spec.ts b/test/tap/cli-test/cli-test.nuget.spec.ts index aeda4cf03c..98a6653a78 100644 --- a/test/tap/cli-test/cli-test.nuget.spec.ts +++ b/test/tap/cli-test/cli-test.nuget.spec.ts @@ -24,505 +24,487 @@ export const NugetTests: AcceptanceTests = { } }, - '`test nuget-app-2 auto-detects project.assets.json`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'project.assets.json', - }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - - await params.cli.test('nuget-app-2'); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - t.same( - spyPlugin.getCall(0).args, - [ - 'nuget-app-2', - 'project.assets.json', - { - args: null, - file: 'project.assets.json', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'nuget-app-2', - showVulnPaths: 'some', + '`test nuget-app-2 auto-detects project.assets.json`': + (params, utils, snykHttpClient) => async (t) => { + 
utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'project.assets.json', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test nuget-app-2.1 auto-detects obj/project.assets.json`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'obj/project.assets.json', + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('nuget-app-2'); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'nuget-app-2', + 'project.assets.json', + { + args: null, + file: 'project.assets.json', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'nuget-app-2', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - - await params.cli.test('nuget-app-2.1'); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - t.same( - spyPlugin.getCall(0).args, - [ - 
'nuget-app-2.1', - 'obj/project.assets.json', - { - args: null, - file: 'obj/project.assets.json', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'nuget-app-2.1', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test nuget-app-2.1 auto-detects obj/project.assets.json`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'obj/project.assets.json', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test nuget-app-4 auto-detects packages.config`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'paket.dependencies', - targetRuntime: 'net465s', + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('nuget-app-2.1'); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'nuget-app-2.1', + 'obj/project.assets.json', + { + args: null, + file: 'obj/project.assets.json', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'nuget-app-2.1', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - 
- await params.cli.test('nuget-app-4'); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - t.same( - spyPlugin.getCall(0).args, - [ - 'nuget-app-4', - 'packages.config', - { - args: null, - file: 'packages.config', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'nuget-app-4', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test nuget-app-4 auto-detects packages.config`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'paket.dependencies', + targetRuntime: 'net465s', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test nuget-app --file=project.assets.json`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'project.assets.json', - targetRuntime: 'net465s', + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('nuget-app-4'); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'nuget-app-4', + 'packages.config', + { + args: null, + file: 
'packages.config', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'nuget-app-4', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - - await params.cli.test('nuget-app', { - file: 'project.assets.json', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.targetFile, 'project.assets.json', 'specifies target'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - t.same( - spyPlugin.getCall(0).args, - [ - 'nuget-app', - 'project.assets.json', - { - args: null, - file: 'project.assets.json', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'nuget-app', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test nuget-app --file=project.assets.json`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'project.assets.json', + targetRuntime: 'net465s', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, - - '`test nuget-app --file=packages.config`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'packages.config', - targetRuntime: 'net465s', + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + 
loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('nuget-app', { + file: 'project.assets.json', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.targetFile, 'project.assets.json', 'specifies target'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'nuget-app', + 'project.assets.json', + { + args: null, + file: 'project.assets.json', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'nuget-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - - await params.cli.test('nuget-app', { - file: 'packages.config', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.targetFile, 'packages.config', 'specifies target'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - t.same( - spyPlugin.getCall(0).args, - [ - 'nuget-app', - 'packages.config', - { - args: null, - file: 'packages.config', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'nuget-app', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test nuget-app --file=packages.config`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 
'packages.config', + targetRuntime: 'net465s', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, - - '`test nuget-app --file=project.json`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'testplugin', - runtime: 'testruntime', - targetFile: 'project.json', + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('nuget-app', { + file: 'packages.config', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.targetFile, 'packages.config', 'specifies target'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'nuget-app', + 'packages.config', + { + args: null, + file: 'packages.config', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'nuget-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - - await params.cli.test('nuget-app', { - file: 'project.json', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.targetFile, 'project.json', 'specifies target'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - 
t.same( - spyPlugin.getCall(0).args, - [ - 'nuget-app', - 'project.json', - { - args: null, - file: 'project.json', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'nuget-app', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test nuget-app --file=project.json`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'testplugin', + runtime: 'testruntime', + targetFile: 'project.json', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, - - '`test paket-app auto-detects paket.dependencies`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'paket.dependencies', - targetRuntime: 'net465s', + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('nuget-app', { + file: 'project.json', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.targetFile, 'project.json', 'specifies target'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'nuget-app', + 'project.json', + { + args: null, + file: 'project.json', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'nuget-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - 
t.teardown(loadPlugin.restore); - loadPlugin.withArgs('paket').returns(plugin); - - await params.cli.test('paket-app'); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'paket'); - t.same( - spyPlugin.getCall(0).args, - [ - 'paket-app', - 'paket.dependencies', - { - args: null, - file: 'paket.dependencies', - org: null, - projectName: null, - packageManager: 'paket', - path: 'paket-app', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test paket-app auto-detects paket.dependencies`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'paket.dependencies', + targetRuntime: 'net465s', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test paket-obj-app auto-detects obj/project.assets.json if exists`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'paket.dependencies', - targetRuntime: 'net465s', + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('paket').returns(plugin); + + await params.cli.test('paket-app'); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'paket'); + 
t.same( + spyPlugin.getCall(0).args, + [ + 'paket-app', + 'paket.dependencies', + { + args: null, + file: 'paket.dependencies', + org: null, + projectName: null, + packageManager: 'paket', + path: 'paket-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('nuget').returns(plugin); - - await params.cli.test('paket-obj-app'); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'nuget'); - t.same( - spyPlugin.getCall(0).args, - [ - 'paket-obj-app', - 'obj/project.assets.json', - { - args: null, - file: 'obj/project.assets.json', - org: null, - projectName: null, - packageManager: 'nuget', - path: 'paket-obj-app', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test paket-obj-app auto-detects obj/project.assets.json if exists`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'paket.dependencies', + targetRuntime: 'net465s', + }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test paket-app --file=paket.dependencies`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { - name: 'snyk-nuget-plugin', - targetFile: 'paket.dependencies', - targetRuntime: 'net465s', + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + 
t.teardown(loadPlugin.restore); + loadPlugin.withArgs('nuget').returns(plugin); + + await params.cli.test('paket-obj-app'); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'nuget'); + t.same( + spyPlugin.getCall(0).args, + [ + 'paket-obj-app', + 'obj/project.assets.json', + { + args: null, + file: 'obj/project.assets.json', + org: null, + projectName: null, + packageManager: 'nuget', + path: 'paket-obj-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('paket').returns(plugin); - - await params.cli.test('paket-app', { - file: 'paket.dependencies', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'paket'); - t.equal(req.body.targetFile, 'paket.dependencies', 'specifies target'); - t.same( - spyPlugin.getCall(0).args, - [ - 'paket-app', - 'paket.dependencies', - { - args: null, - file: 'paket.dependencies', - org: null, - projectName: null, - packageManager: 'paket', - path: 'paket-app', - showVulnPaths: 'some', + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, + + '`test paket-app --file=paket.dependencies`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { + name: 'snyk-nuget-plugin', + targetFile: 'paket.dependencies', + targetRuntime: 'net465s', 
+ }, + }; }, - snykHttpClient, - ], - 'calls nuget plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('paket').returns(plugin); + + await params.cli.test('paket-app', { + file: 'paket.dependencies', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'paket'); + t.equal(req.body.targetFile, 'paket.dependencies', 'specifies target'); + t.same( + spyPlugin.getCall(0).args, + [ + 'paket-app', + 'paket.dependencies', + { + args: null, + file: 'paket.dependencies', + org: null, + projectName: null, + packageManager: 'paket', + path: 'paket-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls nuget plugin', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.python.spec.ts b/test/tap/cli-test/cli-test.python.spec.ts index 16f7158636..a12ec2f3be 100644 --- a/test/tap/cli-test/cli-test.python.spec.ts +++ b/test/tap/cli-test/cli-test.python.spec.ts @@ -6,297 +6,289 @@ import { loadJson } from '../../utils'; export const PythonTests: AcceptanceTests = { language: 'Python', tests: { - '`test pip-app --file=requirements.txt`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - package: {}, - plugin: { name: 'testplugin', runtime: 'testruntime' }, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); - - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('pip').returns(plugin); - - await params.cli.test('pip-app', { - file: 'requirements.txt', - }); - const req = 
params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'pip'); - t.same( - spyPlugin.getCall(0).args, - [ - 'pip-app', - 'requirements.txt', - { - args: null, - file: 'requirements.txt', - org: null, - projectName: null, - packageManager: 'pip', - path: 'pip-app', - showVulnPaths: 'some', + '`test pip-app --file=requirements.txt`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + package: {}, + plugin: { name: 'testplugin', runtime: 'testruntime' }, + }; }, - snykHttpClient, - ], - 'calls python plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test pipenv-app --file=Pipfile`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - plugin: { - targetFile: 'Pipfile', - name: 'snyk-python-plugin', - runtime: 'Python', - }, - package: {}, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('pip').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('pip').returns(plugin); + await params.cli.test('pip-app', { + file: 'requirements.txt', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'pip'); + t.same( + spyPlugin.getCall(0).args, + [ + 'pip-app', + 'requirements.txt', + { + 
args: null, + file: 'requirements.txt', + org: null, + projectName: null, + packageManager: 'pip', + path: 'pip-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls python plugin', + ); + }, - await params.cli.test('pipenv-app', { - file: 'Pipfile', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.targetFile, 'Pipfile', 'specifies target'); - t.equal(req.body.depGraph.pkgManager.name, 'pip'); - t.same( - spyPlugin.getCall(0).args, - [ - 'pipenv-app', - 'Pipfile', - { - args: null, - file: 'Pipfile', - org: null, - projectName: null, - packageManager: 'pip', - path: 'pipenv-app', - showVulnPaths: 'some', + '`test pipenv-app --file=Pipfile`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + plugin: { + targetFile: 'Pipfile', + name: 'snyk-python-plugin', + runtime: 'Python', + }, + package: {}, + }; }, - snykHttpClient, - ], - 'calls python plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - '`test pip-app-transitive-vuln --file=requirements.txt (actionableCliRemediation=false)`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return loadJson('./pip-app-transitive-vuln/inspect-result.json'); - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('pip').returns(plugin); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('pip').returns(plugin); - - params.server.setNextResponse( - 
loadJson('./pip-app-transitive-vuln/response-without-remediation.json'), - ); - try { - await params.cli.test('pip-app-transitive-vuln', { - file: 'requirements.txt', + await params.cli.test('pipenv-app', { + file: 'Pipfile', }); - t.fail('should throw, since there are vulns'); - } catch (e) { + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); t.equal( - e.message.trim(), - fs - .readFileSync('pip-app-transitive-vuln/cli-output.txt', 'utf8') - .trim(), + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', ); - } - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'pip'); - t.same( - spyPlugin.getCall(0).args, - [ - 'pip-app-transitive-vuln', - 'requirements.txt', - { - args: null, - file: 'requirements.txt', - org: null, - projectName: null, - packageManager: 'pip', - path: 'pip-app-transitive-vuln', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls python plugin', - ); - }, + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.targetFile, 'Pipfile', 'specifies target'); + t.equal(req.body.depGraph.pkgManager.name, 'pip'); + t.same( + spyPlugin.getCall(0).args, + [ + 'pipenv-app', + 'Pipfile', + { + args: null, + file: 'Pipfile', + org: null, + projectName: null, + packageManager: 'pip', + path: 'pipenv-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls python plugin', + ); + }, - '`test pip-app-transitive-vuln --file=requirements.txt (actionableCliRemediation=true)`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return 
loadJson('./pip-app-transitive-vuln/inspect-result.json'); - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + '`test pip-app-transitive-vuln --file=requirements.txt (actionableCliRemediation=false)`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return loadJson('./pip-app-transitive-vuln/inspect-result.json'); + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('pip').returns(plugin); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('pip').returns(plugin); - params.server.setNextResponse( - loadJson('./pip-app-transitive-vuln/response-with-remediation.json'), - ); - try { - await params.cli.test('pip-app-transitive-vuln', { - file: 'requirements.txt', - }); - t.fail('should throw, since there are vulns'); - } catch (e) { - t.equal( - e.message.trim(), - fs - .readFileSync( - 'pip-app-transitive-vuln/cli-output-actionable-remediation.txt', - 'utf8', - ) - .trim(), + params.server.setNextResponse( + loadJson( + './pip-app-transitive-vuln/response-without-remediation.json', + ), ); - } - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.depGraph.pkgManager.name, 'pip'); - t.same( - spyPlugin.getCall(0).args, - [ - 'pip-app-transitive-vuln', - 'requirements.txt', - { - args: null, + try { + await params.cli.test('pip-app-transitive-vuln', { file: 'requirements.txt', - org: null, - projectName: null, - packageManager: 'pip', - path: 'pip-app-transitive-vuln', - showVulnPaths: 'some', - }, - snykHttpClient, - ], - 'calls python plugin', - ); - }, - '`test setup_py-app 
--file=setup.py`': ( - params, - utils, - snykHttpClient, - ) => async (t) => { - utils.chdirWorkspaces(); - const plugin = { - async inspect() { - return { - plugin: { - targetFile: 'setup.py', - name: 'snyk-python-plugin', - runtime: 'Python', + }); + t.fail('should throw, since there are vulns'); + } catch (e) { + t.equal( + e.message.trim(), + fs + .readFileSync('pip-app-transitive-vuln/cli-output.txt', 'utf8') + .trim(), + ); + } + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'pip'); + t.same( + spyPlugin.getCall(0).args, + [ + 'pip-app-transitive-vuln', + 'requirements.txt', + { + args: null, + file: 'requirements.txt', + org: null, + projectName: null, + packageManager: 'pip', + path: 'pip-app-transitive-vuln', + showVulnPaths: 'some', + maxVulnPaths: undefined, }, - package: {}, - }; - }, - }; - const spyPlugin = sinon.spy(plugin, 'inspect'); + snykHttpClient, + ], + 'calls python plugin', + ); + }, - const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); - t.teardown(loadPlugin.restore); - loadPlugin.withArgs('pip').returns(plugin); + '`test pip-app-transitive-vuln --file=requirements.txt (actionableCliRemediation=true)`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return loadJson('./pip-app-transitive-vuln/inspect-result.json'); + }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); - await params.cli.test('setup_py-app', { - file: 'setup.py', - }); + const loadPlugin = sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('pip').returns(plugin); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - 
req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.equal(req.body.targetFile, 'setup.py', 'specifies target'); - t.equal(req.body.depGraph.pkgManager.name, 'pip'); - t.same( - spyPlugin.getCall(0).args, - [ - 'setup_py-app', - 'setup.py', - { - args: null, - file: 'setup.py', - org: null, - projectName: null, - packageManager: 'pip', - path: 'setup_py-app', - showVulnPaths: 'some', + params.server.setNextResponse( + loadJson('./pip-app-transitive-vuln/response-with-remediation.json'), + ); + try { + await params.cli.test('pip-app-transitive-vuln', { + file: 'requirements.txt', + }); + t.fail('should throw, since there are vulns'); + } catch (e) { + t.equal( + e.message.trim(), + fs + .readFileSync( + 'pip-app-transitive-vuln/cli-output-actionable-remediation.txt', + 'utf8', + ) + .trim(), + ); + } + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.depGraph.pkgManager.name, 'pip'); + t.same( + spyPlugin.getCall(0).args, + [ + 'pip-app-transitive-vuln', + 'requirements.txt', + { + args: null, + file: 'requirements.txt', + org: null, + projectName: null, + packageManager: 'pip', + path: 'pip-app-transitive-vuln', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls python plugin', + ); + }, + '`test setup_py-app --file=setup.py`': + (params, utils, snykHttpClient) => async (t) => { + utils.chdirWorkspaces(); + const plugin = { + async inspect() { + return { + plugin: { + targetFile: 'setup.py', + name: 'snyk-python-plugin', + runtime: 'Python', + }, + package: {}, + }; }, - snykHttpClient, - ], - 'calls python plugin', - ); - }, + }; + const spyPlugin = sinon.spy(plugin, 'inspect'); + + const loadPlugin 
= sinon.stub(params.plugins, 'loadPlugin'); + t.teardown(loadPlugin.restore); + loadPlugin.withArgs('pip').returns(plugin); + + await params.cli.test('setup_py-app', { + file: 'setup.py', + }); + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.equal(req.body.targetFile, 'setup.py', 'specifies target'); + t.equal(req.body.depGraph.pkgManager.name, 'pip'); + t.same( + spyPlugin.getCall(0).args, + [ + 'setup_py-app', + 'setup.py', + { + args: null, + file: 'setup.py', + org: null, + projectName: null, + packageManager: 'pip', + path: 'setup_py-app', + showVulnPaths: 'some', + maxVulnPaths: undefined, + }, + snykHttpClient, + ], + 'calls python plugin', + ); + }, }, }; diff --git a/test/tap/cli-test/cli-test.ruby.spec.ts b/test/tap/cli-test/cli-test.ruby.spec.ts index 9b2e614ec7..1438adaca1 100644 --- a/test/tap/cli-test/cli-test.ruby.spec.ts +++ b/test/tap/cli-test/cli-test.ruby.spec.ts @@ -8,18 +8,17 @@ import * as path from 'path'; export const RubyTests: AcceptanceTests = { language: 'Ruby', tests: { - '`test ruby-app-no-lockfile --file=Gemfile`': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('ruby-app-no-lockfile', { file: 'Gemfile' }); - t.fail('should have failed'); - } catch (err) { - t.pass('throws err'); - t.match(err.message, 'Please run `bundle install`', 'shows err'); - } - }, + '`test ruby-app-no-lockfile --file=Gemfile`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('ruby-app-no-lockfile', { file: 'Gemfile' }); + t.fail('should have failed'); + } catch (err) { + t.pass('throws err'); + t.match(err.message, 'Please run `bundle install`', 'shows err'); + } + }, '`test ruby-app --file=Gemfile.lock`': (params, utils) => async (t) => { 
utils.chdirWorkspaces(); @@ -43,133 +42,125 @@ export const RubyTests: AcceptanceTests = { ); }, - '`test ruby-app-custom-names --file=123.gemfile.lock --package-manager=rubygems`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('ruby-app-custom-names', { - file: '123.gemfile.lock', - packageManager: 'rubygems', - }); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - - const depGraph = req.body.depGraph; - t.equal(depGraph.pkgManager.name, 'rubygems'); - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'crass@1.0.4', - 'lynx@0.4.0', - 'mini_portile2@2.3.0', - 'nokogiri@1.8.5', - 'nokogumbo@1.5.0', - 'ruby-app-custom-names@', - 'sanitize@4.6.2', - 'yard@0.8.0', - ].sort(), - 'depGraph looks fine', - ); - }, - '`test ruby-app-custom-names --file=123.gemfile --package-manager=rubygems`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('ruby-app-custom-names', { - file: '123.gemfile', - packageManager: 'rubygems', - }); - - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); + '`test ruby-app-custom-names --file=123.gemfile.lock --package-manager=rubygems`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('ruby-app-custom-names', { + file: '123.gemfile.lock', + packageManager: 'rubygems', + }); - const depGraph = req.body.depGraph; - t.equal(depGraph.pkgManager.name, 'rubygems'); - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'crass@1.0.4', - 'lynx@0.4.0', - 'mini_portile2@2.3.0', - 'nokogiri@1.8.5', - 
'nokogumbo@1.5.0', - 'ruby-app-custom-names@', - 'sanitize@4.6.2', - 'yard@0.8.0', - ].sort(), - 'depGraph looks fine', - ); - }, + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); - '`test ruby-app-custom-names --file=gemfiles/Gemfile.rails-2.3.6 --package-manager=rubygems`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - try { + const depGraph = req.body.depGraph; + t.equal(depGraph.pkgManager.name, 'rubygems'); + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'crass@1.0.4', + 'lynx@0.4.0', + 'mini_portile2@2.3.0', + 'nokogiri@1.8.5', + 'nokogumbo@1.5.0', + 'ruby-app-custom-names@', + 'sanitize@4.6.2', + 'yard@0.8.0', + ].sort(), + 'depGraph looks fine', + ); + }, + '`test ruby-app-custom-names --file=123.gemfile --package-manager=rubygems`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); await params.cli.test('ruby-app-custom-names', { - file: 'gemfiles/Gemfile.rails-2.3.6', + file: '123.gemfile', packageManager: 'rubygems', }); - } catch (e) { - t.match( - e.message, - 'if this is a custom file name re-run with --file=path/to/custom.gemfile.lock --package-manager=rubygems', + + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', ); - } - }, + t.match(req.url, '/test-dep-graph', 'posts to correct url'); - '`test ruby-app-custom-names --file=gemfiles/Gemfile.rails-2.4.5.lock --package-manager=rubygems`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('ruby-app-custom-names', { - file: 'gemfiles/Gemfile.rails-2.4.5.lock', - packageManager: 'rubygems', - }); + const depGraph = req.body.depGraph; + 
t.equal(depGraph.pkgManager.name, 'rubygems'); + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'crass@1.0.4', + 'lynx@0.4.0', + 'mini_portile2@2.3.0', + 'nokogiri@1.8.5', + 'nokogumbo@1.5.0', + 'ruby-app-custom-names@', + 'sanitize@4.6.2', + 'yard@0.8.0', + ].sort(), + 'depGraph looks fine', + ); + }, + + '`test ruby-app-custom-names --file=gemfiles/Gemfile.rails-2.3.6 --package-manager=rubygems`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('ruby-app-custom-names', { + file: 'gemfiles/Gemfile.rails-2.3.6', + packageManager: 'rubygems', + }); + } catch (e) { + t.match( + e.message, + 'if this is a custom file name re-run with --file=path/to/custom.gemfile.lock --package-manager=rubygems', + ); + } + }, + + '`test ruby-app-custom-names --file=gemfiles/Gemfile.rails-2.4.5.lock --package-manager=rubygems`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('ruby-app-custom-names', { + file: 'gemfiles/Gemfile.rails-2.4.5.lock', + packageManager: 'rubygems', + }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); - const depGraph = req.body.depGraph; - t.equal(depGraph.pkgManager.name, 'rubygems'); - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'crass@1.0.4', - 'lynx@0.4.0', - 'mini_portile2@2.3.0', - 'nokogiri@1.8.5', - 'nokogumbo@1.5.0', - 'ruby-app-custom-names@', - 'sanitize@4.6.2', - 'yard@0.8.0', - ].sort(), - 'depGraph looks fine', - ); - }, + const depGraph = req.body.depGraph; + 
t.equal(depGraph.pkgManager.name, 'rubygems'); + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'crass@1.0.4', + 'lynx@0.4.0', + 'mini_portile2@2.3.0', + 'nokogiri@1.8.5', + 'nokogumbo@1.5.0', + 'ruby-app-custom-names@', + 'sanitize@4.6.2', + 'yard@0.8.0', + ].sort(), + 'depGraph looks fine', + ); + }, '`test ruby-app` meta when no vulns': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -240,297 +231,282 @@ export const RubyTests: AcceptanceTests = { } }, - '`test ruby-app-thresholds --severity-threshold=low --json`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); + '`test ruby-app-thresholds --severity-threshold=low --json`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-low-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'low', - json: true, - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'low'); - - const res = JSON.parse(err.message); - - const expected = getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-result-low-severity.json', + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-low-severity.json', + ), ); - t.same( - omit(res, ['vulnerabilities']), - omit(expected, ['vulnerabilities']), - 'metadata is ok', + try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'low', + json: true, + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'low'); + + const res = JSON.parse(err.message); + + const expected = getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-result-low-severity.json', + ); + + t.same( + omit(res, ['vulnerabilities']), + omit(expected, ['vulnerabilities']), + 'metadata is ok', + ); + t.same( + 
sortBy(res.vulnerabilities, 'id'), + sortBy(expected.vulnerabilities, 'id'), + 'vulns are the same', + ); + } + }, + + '`test ruby-app-thresholds --severity-threshold=medium`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-medium-severity.json', + ), ); - t.same( - sortBy(res.vulnerabilities, 'id'), - sortBy(expected.vulnerabilities, 'id'), - 'vulns are the same', - ); - } - }, - - '`test ruby-app-thresholds --severity-threshold=medium`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-medium-severity.json', - ), - ); - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'medium', - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'medium'); - - const res = err.message; - - t.match( - res, - 'Tested 7 dependencies for known vulnerabilities, found 5 vulnerabilities, 6 vulnerable paths', - '5 vulns', + try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'medium', + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'medium'); + + const res = err.message; + + t.match( + res, + 'Tested 7 dependencies for known vulnerabilities, found 5 vulnerabilities, 6 vulnerable paths', + '5 vulns', + ); + } + }, + + '`test ruby-app-thresholds --ignore-policy`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-medium-severity.json', + ), ); - } - }, - '`test ruby-app-thresholds --ignore-policy`': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - - params.server.setNextResponse( - getWorkspaceJSON( - 
'ruby-app-thresholds', - 'test-graph-result-medium-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - 'ignore-policy': true, - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.ignorePolicy, 'true'); - t.end(); - } - }, - - '`test ruby-app-thresholds --severity-threshold=medium --json`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-medium-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'medium', - json: true, - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'medium'); - - const res = JSON.parse(err.message); - - const expected = getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-result-medium-severity.json', + try { + await params.cli.test('ruby-app-thresholds', { + 'ignore-policy': true, + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.ignorePolicy, 'true'); + t.end(); + } + }, + + '`test ruby-app-thresholds --severity-threshold=medium --json`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-medium-severity.json', + ), ); - t.same( - omit(res, ['vulnerabilities']), - omit(expected, ['vulnerabilities']), - 'metadata is ok', + try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'medium', + json: true, + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'medium'); + + const res = JSON.parse(err.message); + + const expected = getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-result-medium-severity.json', + ); + 
+ t.same( + omit(res, ['vulnerabilities']), + omit(expected, ['vulnerabilities']), + 'metadata is ok', + ); + t.same( + sortBy(res.vulnerabilities, 'id'), + sortBy(expected.vulnerabilities, 'id'), + 'vulns are the same', + ); + } + }, + + '`test ruby-app-thresholds --severity-threshold=high': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-high-severity.json', + ), ); - t.same( - sortBy(res.vulnerabilities, 'id'), - sortBy(expected.vulnerabilities, 'id'), - 'vulns are the same', - ); - } - }, - - '`test ruby-app-thresholds --severity-threshold=high': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-high-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'high', - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'high'); - - const res = err.message; - - t.match( - res, - 'Tested 7 dependencies for known vulnerabilities, found 3 vulnerabilities, 4 vulnerable paths', - '3 vulns', + try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'high', + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'high'); + + const res = err.message; + + t.match( + res, + 'Tested 7 dependencies for known vulnerabilities, found 3 vulnerabilities, 4 vulnerable paths', + '3 vulns', + ); + } + }, + + '`test ruby-app-thresholds --severity-threshold=high --json`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-high-severity.json', + ), ); - } - }, - '`test ruby-app-thresholds --severity-threshold=high 
--json`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-high-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'high', - json: true, - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'high'); - - const res = JSON.parse(err.message); - - const expected = getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-result-high-severity.json', + try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'high', + json: true, + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'high'); + + const res = JSON.parse(err.message); + + const expected = getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-result-high-severity.json', + ); + + t.same( + omit(res, ['vulnerabilities']), + omit(expected, ['vulnerabilities']), + 'metadata is ok', + ); + t.same( + sortBy(res.vulnerabilities, 'id'), + sortBy(expected.vulnerabilities, 'id'), + 'vulns are the same', + ); + } + }, + + '`test ruby-app-thresholds --severity-threshold=critical': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-critical-severity.json', + ), ); - t.same( - omit(res, ['vulnerabilities']), - omit(expected, ['vulnerabilities']), - 'metadata is ok', + try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'critical', + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'critical'); + + const res = err.message; + + t.match( + res, + 'Tested 7 dependencies for known vulnerabilities, found 1 vulnerability, 2 vulnerable paths', + '1 vuln', + ); + } 
+ }, + + '`test ruby-app-thresholds --severity-threshold=critical --json`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-graph-result-critical-severity.json', + ), ); - t.same( - sortBy(res.vulnerabilities, 'id'), - sortBy(expected.vulnerabilities, 'id'), - 'vulns are the same', - ); - } - }, - - '`test ruby-app-thresholds --severity-threshold=critical': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-critical-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'critical', - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'critical'); - - const res = err.message; - - t.match( - res, - 'Tested 7 dependencies for known vulnerabilities, found 1 vulnerability, 2 vulnerable paths', - '1 vuln', - ); - } - }, - - '`test ruby-app-thresholds --severity-threshold=critical --json`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-graph-result-critical-severity.json', - ), - ); - - try { - await params.cli.test('ruby-app-thresholds', { - severityThreshold: 'critical', - json: true, - }); - t.fail('should have thrown'); - } catch (err) { - const req = params.server.popRequest(); - t.equal(req.query.severityThreshold, 'critical'); - - const res = JSON.parse(err.message); - - const expected = getWorkspaceJSON( - 'ruby-app-thresholds', - 'test-result-critical-severity.json', - ); - - t.same( - omit(res, ['vulnerabilities']), - omit(expected, ['vulnerabilities']), - 'metadata is ok', - ); - t.same( - sortBy(res.vulnerabilities, 'id'), - sortBy(expected.vulnerabilities, 'id'), - 'vulns are the same', - ); - } - }, 
+ try { + await params.cli.test('ruby-app-thresholds', { + severityThreshold: 'critical', + json: true, + }); + t.fail('should have thrown'); + } catch (err) { + const req = params.server.popRequest(); + t.equal(req.query.severityThreshold, 'critical'); + + const res = JSON.parse(err.message); + + const expected = getWorkspaceJSON( + 'ruby-app-thresholds', + 'test-result-critical-severity.json', + ); + + t.same( + omit(res, ['vulnerabilities']), + omit(expected, ['vulnerabilities']), + 'metadata is ok', + ); + t.same( + sortBy(res.vulnerabilities, 'id'), + sortBy(expected.vulnerabilities, 'id'), + 'vulns are the same', + ); + } + }, '`test ruby-app-policy`': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -565,43 +541,42 @@ export const RubyTests: AcceptanceTests = { } }, - '`test ruby-app-policy` with cloud ignores': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - - params.server.setNextResponse( - getWorkspaceJSON( - 'ruby-app-policy', - 'test-graph-result-cloud-ignore.json', - ), - ); - - try { - await params.cli.test('ruby-app-policy', { - json: true, - }); - t.fail('should have thrown'); - } catch (err) { - const res = JSON.parse(err.message); + '`test ruby-app-policy` with cloud ignores': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); - const expected = getWorkspaceJSON( - 'ruby-app-policy', - 'test-result-cloud-ignore.json', + params.server.setNextResponse( + getWorkspaceJSON( + 'ruby-app-policy', + 'test-graph-result-cloud-ignore.json', + ), ); - t.same( - omit(res, ['vulnerabilities']), - omit(expected, ['vulnerabilities']), - 'metadata is ok', - ); - t.same( - sortBy(res.vulnerabilities, 'id'), - sortBy(expected.vulnerabilities, 'id'), - 'vulns are the same', - ); - } - }, + try { + await params.cli.test('ruby-app-policy', { + json: true, + }); + t.fail('should have thrown'); + } catch (err) { + const res = JSON.parse(err.message); + + const expected = getWorkspaceJSON( + 'ruby-app-policy', + 
'test-result-cloud-ignore.json', + ); + + t.same( + omit(res, ['vulnerabilities']), + omit(expected, ['vulnerabilities']), + 'metadata is ok', + ); + t.same( + sortBy(res.vulnerabilities, 'id'), + sortBy(expected.vulnerabilities, 'id'), + 'vulns are the same', + ); + } + }, '`test ruby-app-no-vulns`': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -649,44 +624,44 @@ export const RubyTests: AcceptanceTests = { t.same(res, expected, '--json output is the same'); }, - '`test` returns correct meta when target file specified': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const commandResult: CommandResult = await params.cli.test('ruby-app', { - file: 'Gemfile.lock', - }); - const res = commandResult.getDisplayResults(); - const meta = res.slice(res.indexOf('Organization:')).split('\n'); - t.match(meta[2], /Target file:\s+Gemfile.lock/, 'target file displayed'); - }, + '`test` returns correct meta when target file specified': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const commandResult: CommandResult = await params.cli.test('ruby-app', { + file: 'Gemfile.lock', + }); + const res = commandResult.getDisplayResults(); + const meta = res.slice(res.indexOf('Organization:')).split('\n'); + t.match( + meta[2], + /Target file:\s+Gemfile.lock/, + 'target file displayed', + ); + }, - '`test ruby-gem-no-lockfile --file=ruby-gem.gemspec`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('ruby-gem-no-lockfile', { - file: 'ruby-gem.gemspec', - }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); + '`test ruby-gem-no-lockfile --file=ruby-gem.gemspec`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('ruby-gem-no-lockfile', { + 
file: 'ruby-gem.gemspec', + }); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); - const depGraph = req.body.depGraph; - t.equal(depGraph.pkgManager.name, 'rubygems'); - t.same( - depGraph.pkgs.map((p) => p.id), - ['ruby-gem-no-lockfile@'], - 'no deps as we dont really support gemspecs yet', - ); - }, + const depGraph = req.body.depGraph; + t.equal(depGraph.pkgManager.name, 'rubygems'); + t.same( + depGraph.pkgs.map((p) => p.id), + ['ruby-gem-no-lockfile@'], + 'no deps as we dont really support gemspecs yet', + ); + }, '`test ruby-gem --file=ruby-gem.gemspec`': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -732,31 +707,30 @@ export const RubyTests: AcceptanceTests = { t.notOk(req.body.targetFile, 'does not specify target'); }, - '`test monorepo --file=sub-ruby-app/Gemfile`': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - await params.cli.test('monorepo', { file: 'sub-ruby-app/Gemfile' }); + '`test monorepo --file=sub-ruby-app/Gemfile`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('monorepo', { file: 'sub-ruby-app/Gemfile' }); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); - const depGraph = req.body.depGraph; - t.equal(depGraph.pkgManager.name, 'rubygems'); - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - 
['monorepo@', 'json@2.0.2', 'lynx@0.4.0'].sort(), - 'depGraph looks fine', - ); + const depGraph = req.body.depGraph; + t.equal(depGraph.pkgManager.name, 'rubygems'); + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['monorepo@', 'json@2.0.2', 'lynx@0.4.0'].sort(), + 'depGraph looks fine', + ); - t.notOk(req.body.targetFile, 'does not specify target'); - }, + t.notOk(req.body.targetFile, 'does not specify target'); + }, '`test empty --file=Gemfile`': (params, utils) => async (t) => { utils.chdirWorkspaces(); @@ -772,20 +746,18 @@ export const RubyTests: AcceptanceTests = { ); } }, - '`test large-mono-repo --file=bundler-app/Gemfile`': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const res = await params.cli.test('large-mono-repo', { - file: 'bundler-app/Gemfile', - }); - t.match( - res.getDisplayResults(), - '--all-projects', - 'Suggest using --all-projects', - ); - }, + '`test large-mono-repo --file=bundler-app/Gemfile`': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const res = await params.cli.test('large-mono-repo', { + file: 'bundler-app/Gemfile', + }); + t.match( + res.getDisplayResults(), + '--all-projects', + 'Suggest using --all-projects', + ); + }, '`test monorepo --all-projects`': (params, utils) => async (t) => { utils.chdirWorkspaces(); diff --git a/test/tap/cli-test/cli-test.sbt.spec.ts b/test/tap/cli-test/cli-test.sbt.spec.ts index 20e23734b7..be89fd3ca6 100644 --- a/test/tap/cli-test/cli-test.sbt.spec.ts +++ b/test/tap/cli-test/cli-test.sbt.spec.ts @@ -15,9 +15,9 @@ export const SbtTests: AcceptanceTests = { async inspect() { return { plugin: { name: 'sbt' }, - package: require(getWorkspacePath( - 'sbt-simple-struts/dep-tree.json', - )), + package: require( + getWorkspacePath('sbt-simple-struts/dep-tree.json'), + ), }; }, }; @@ -39,9 +39,9 @@ export const SbtTests: AcceptanceTests = { } catch (err) { const res = JSON.parse(err.message); - const expected = require(getWorkspacePath( - 
'sbt-simple-struts/legacy-res-json.json', - )); + const expected = require( + getWorkspacePath('sbt-simple-struts/legacy-res-json.json'), + ); t.same( omit(res, ['vulnerabilities', 'packageManager']), diff --git a/test/tap/cli-test/cli-test.yarn-workspaces.spec.ts b/test/tap/cli-test/cli-test.yarn-workspaces.spec.ts index a98ef2abf3..bd44a4ea60 100644 --- a/test/tap/cli-test/cli-test.yarn-workspaces.spec.ts +++ b/test/tap/cli-test/cli-test.yarn-workspaces.spec.ts @@ -6,342 +6,331 @@ export const YarnWorkspacesTests: AcceptanceTests = { language: 'Yarn', tests: { // yarn lockfile based testing is only supported for node 4+ - '`test yarn-workspace-out-of-sync --yarn-workspaces` out of sync fails': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('yarn-workspace-out-of-sync', { + '`test yarn-workspace-out-of-sync --yarn-workspaces` out of sync fails': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('yarn-workspace-out-of-sync', { + dev: true, + yarnWorkspaces: true, + detectionDepth: 3, + }); + t.fail('Should fail'); + } catch (e) { + t.equal( + e.message, + '\nTesting yarn-workspace-out-of-sync...\n\n' + + 'Dependency snyk@1.320.0 was not found in yarn.lock.' + + ' Your package.json and yarn.lock are probably out of sync.' + + ' Please run "yarn install" and try again.', + 'Contains enough info about err', + ); + } + }, + '`test yarn-workspace-out-of-sync --yarn-workspaces --strict-out-of-sync=false --dev` passes': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const result = await params.cli.test('yarn-workspace-out-of-sync', { dev: true, + strictOutOfSync: false, yarnWorkspaces: true, - detectionDepth: 3, }); - t.fail('Should fail'); - } catch (e) { - t.equal( - e.message, - '\nTesting yarn-workspace-out-of-sync...\n\n' + - 'Dependency snyk@1.320.0 was not found in yarn.lock.' + - ' Your package.json and yarn.lock are probably out of sync.' 
+ - ' Please run "yarn install" and try again.', - 'Contains enough info about err', + t.match(result.getDisplayResults(), 'Package manager: yarn\n'); + t.match( + result.getDisplayResults(), + 'Project name: package.json', + 'yarn project in output', ); - } - }, - '`test yarn-workspace-out-of-sync --yarn-workspaces --strict-out-of-sync=false --dev` passes': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const result = await params.cli.test('yarn-workspace-out-of-sync', { - dev: true, - strictOutOfSync: false, - yarnWorkspaces: true, - }); - t.match(result.getDisplayResults(), 'Package manager: yarn\n'); - t.match( - result.getDisplayResults(), - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: tomatoes', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: apples', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Tested 3 projects, no vulnerable paths were found.', - 'no vulnerable paths found as both policies detected and applied.', - ); - }, - 'test --yarn-workspaces --detection-depth=5': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - const result = await params.cli.test('yarn-workspaces', { - yarnWorkspaces: true, - detectionDepth: 5, - }); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - // the parser is used directly - t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); - t.teardown(() => { - loadPlugin.restore(); - }); - t.match( - result.getDisplayResults(), - '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', - 'correctly showing dep number', - ); - t.match(result.getDisplayResults(), 'Package manager: yarn\n'); - t.match( - result.getDisplayResults(), - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: tomatoes', - 'yarn project in output', - ); 
- t.match( - result.getDisplayResults(), - 'Project name: apples', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: apple-lib', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Tested 4 projects, no vulnerable paths were found.', - 'no vulnerable paths found as both policies detected and applied.', - ); - let policyCount = 0; - const applesWorkspace = - process.platform === 'win32' - ? '\\apples\\package.json' - : 'apples/package.json'; - const tomatoesWorkspace = - process.platform === 'win32' - ? '\\tomatoes\\package.json' - : 'tomatoes/package.json'; - const rootWorkspace = - process.platform === 'win32' - ? '\\yarn-workspaces\\package.json' - : 'yarn-workspaces/package.json'; - - params.server.popRequests(4).forEach((req) => { - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + t.match( + result.getDisplayResults(), + 'Project name: tomatoes', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: apples', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Tested 3 projects, no vulnerable paths were found.', + 'no vulnerable paths found as both policies detected and applied.', + ); + }, + 'test --yarn-workspaces --detection-depth=5': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const result = await params.cli.test('yarn-workspaces', { + yarnWorkspaces: true, + detectionDepth: 5, + }); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + // the parser is used directly + t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); + t.teardown(() => { + loadPlugin.restore(); + }); + t.match( + result.getDisplayResults(), + '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', + 'correctly showing dep number', + ); + t.match(result.getDisplayResults(), 'Package manager: 
yarn\n'); + t.match( + result.getDisplayResults(), + 'Project name: package.json', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: tomatoes', + 'yarn project in output', ); - t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); - t.ok(req.body.depGraph, 'body contains depGraph'); + t.match( + result.getDisplayResults(), + 'Project name: apples', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: apple-lib', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Tested 4 projects, no vulnerable paths were found.', + 'no vulnerable paths found as both policies detected and applied.', + ); + let policyCount = 0; + const applesWorkspace = + process.platform === 'win32' + ? '\\apples\\package.json' + : 'apples/package.json'; + const tomatoesWorkspace = + process.platform === 'win32' + ? '\\tomatoes\\package.json' + : 'tomatoes/package.json'; + const rootWorkspace = + process.platform === 'win32' + ? 
'\\yarn-workspaces\\package.json' + : 'yarn-workspaces/package.json'; - if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { - t.match( - req.body.policy, - 'npm:node-uuid:20160328', - 'policy is as expected', + params.server.popRequests(4).forEach((req) => { + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } else if ( - req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) - ) { - t.notOk(req.body.policy, 'body does not contain policy'); - } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { - t.match( - req.body.policy, - 'npm:node-uuid:20111130', - 'policy is as expected', + t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); + t.ok(req.body.depGraph, 'body contains depGraph'); + + if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20160328', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } else if ( + req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) + ) { + t.notOk(req.body.policy, 'body does not contain policy'); + } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20111130', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } + t.equal( + req.body.depGraph.pkgManager.name, + 'yarn', + 'depGraph has package manager', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } - t.equal( - req.body.depGraph.pkgManager.name, - 'yarn', - 'depGraph has package manager', + }); + t.equal(policyCount, 2, '2 policies found in a workspace'); + }, + 'test --yarn-workspaces --detection-depth=5 --strict-out-of-sync=false (yarn v2)': + (params, utils) => async (t) => { + // Yarn workspaces 
for Yarn 2 is only supported on Node 10+ + utils.chdirWorkspaces(); + const result = await params.cli.test('yarn-workspaces-v2', { + yarnWorkspaces: true, + detectionDepth: 5, + strictOutOfSync: false, + }); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + // the parser is used directly + t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); + t.teardown(() => { + loadPlugin.restore(); + }); + t.match( + result.getDisplayResults(), + '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', + 'correctly showing dep number', ); - }); - t.equal(policyCount, 2, '2 policies found in a workspace'); - }, - 'test --yarn-workspaces --detection-depth=5 --strict-out-of-sync=false (yarn v2)': ( - params, - utils, - ) => async (t) => { - // Yarn workspaces for Yarn 2 is only supported on Node 10+ - utils.chdirWorkspaces(); - const result = await params.cli.test('yarn-workspaces-v2', { - yarnWorkspaces: true, - detectionDepth: 5, - strictOutOfSync: false, - }); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - // the parser is used directly - t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); - t.teardown(() => { - loadPlugin.restore(); - }); - t.match( - result.getDisplayResults(), - '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', - 'correctly showing dep number', - ); - t.match(result.getDisplayResults(), 'Package manager: yarn\n'); - t.match( - result.getDisplayResults(), - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: tomatoes', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: apples', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Tested 3 projects, no vulnerable paths were found.', - 'no vulnerable paths found as both policies detected and applied.', - ); - }, - 'test --yarn-workspaces --detection-depth=5 
--strict-out-of-sync=false (yarn v2 with resolutions)': ( - params, - utils, - ) => async (t) => { - // Yarn workspaces for Yarn 2 is only supported on Node 10+ - utils.chdirWorkspaces(); - const result = await params.cli.test('yarn-workspaces-v2-resolutions', { - yarnWorkspaces: true, - detectionDepth: 5, - strictOutOfSync: false, - printDeps: true, - }); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - // the parser is used directly - t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); - t.teardown(() => { - loadPlugin.restore(); - }); - console.log(result.getDisplayResults()); - t.match( - result.getDisplayResults(), - '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', - 'correctly showing dep number', - ); - t.match(result.getDisplayResults(), 'Package manager: yarn\n'); - t.match( - result.getDisplayResults(), - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: tomatoes', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: apples', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Tested 3 projects, no vulnerable paths were found.', - 'no vulnerable paths found as both policies detected and applied.', - ); - }, - 'test --yarn-workspaces --detection-depth=5 multiple workspaces found': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - const result = await params.cli.test({ - yarnWorkspaces: true, - detectionDepth: 5, - strictOutOfSync: false, - }); - const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); - // the parser is used directly - t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); - t.teardown(() => { - loadPlugin.restore(); - }); - t.match( - result.getDisplayResults(), - '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', - 'correctly showing dep number', - ); - 
t.match(result.getDisplayResults(), 'Package manager: yarn\n'); - t.match( - result.getDisplayResults(), - 'Project name: package.json', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: tomatoes', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Project name: apples', - 'yarn project in output', - ); - t.match( - result.getDisplayResults(), - 'Tested 13 projects, no vulnerable paths were found.', - 'Tested 13 projects', - ); - let policyCount = 0; - const applesWorkspace = - process.platform === 'win32' - ? '\\apples\\package.json' - : 'apples/package.json'; - const tomatoesWorkspace = - process.platform === 'win32' - ? '\\tomatoes\\package.json' - : 'tomatoes/package.json'; - const rootWorkspace = - process.platform === 'win32' - ? '\\yarn-workspaces\\package.json' - : 'yarn-workspaces/package.json'; - - params.server.popRequests(6).forEach((req) => { - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', + t.match(result.getDisplayResults(), 'Package manager: yarn\n'); + t.match( + result.getDisplayResults(), + 'Project name: package.json', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: tomatoes', + 'yarn project in output', ); - t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); - t.ok(req.body.depGraph, 'body contains depGraph'); - if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { - t.match( - req.body.policy, - 'npm:node-uuid:20160328', - 'policy is as expected', + t.match( + result.getDisplayResults(), + 'Project name: apples', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Tested 3 projects, no vulnerable paths were found.', + 'no vulnerable paths found as both policies detected and applied.', + ); + }, + 'test --yarn-workspaces --detection-depth=5 --strict-out-of-sync=false (yarn v2 with 
resolutions)': + (params, utils) => async (t) => { + // Yarn workspaces for Yarn 2 is only supported on Node 10+ + utils.chdirWorkspaces(); + const result = await params.cli.test('yarn-workspaces-v2-resolutions', { + yarnWorkspaces: true, + detectionDepth: 5, + strictOutOfSync: false, + printDeps: true, + }); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + // the parser is used directly + t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); + t.teardown(() => { + loadPlugin.restore(); + }); + console.log(result.getDisplayResults()); + t.match( + result.getDisplayResults(), + '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', + 'correctly showing dep number', + ); + t.match(result.getDisplayResults(), 'Package manager: yarn\n'); + t.match( + result.getDisplayResults(), + 'Project name: package.json', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: tomatoes', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: apples', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Tested 3 projects, no vulnerable paths were found.', + 'no vulnerable paths found as both policies detected and applied.', + ); + }, + 'test --yarn-workspaces --detection-depth=5 multiple workspaces found': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + const result = await params.cli.test({ + yarnWorkspaces: true, + detectionDepth: 5, + strictOutOfSync: false, + }); + const loadPlugin = sinon.spy(params.plugins, 'loadPlugin'); + // the parser is used directly + t.ok(loadPlugin.withArgs('yarn').notCalled, 'skips load plugin'); + t.teardown(() => { + loadPlugin.restore(); + }); + t.match( + result.getDisplayResults(), + '✔ Tested 1 dependencies for known vulnerabilities, no vulnerable paths found.', + 'correctly showing dep number', + ); + t.match(result.getDisplayResults(), 'Package manager: yarn\n'); + t.match( + 
result.getDisplayResults(), + 'Project name: package.json', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: tomatoes', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Project name: apples', + 'yarn project in output', + ); + t.match( + result.getDisplayResults(), + 'Tested 13 projects, no vulnerable paths were found.', + 'Tested 13 projects', + ); + let policyCount = 0; + const applesWorkspace = + process.platform === 'win32' + ? '\\apples\\package.json' + : 'apples/package.json'; + const tomatoesWorkspace = + process.platform === 'win32' + ? '\\tomatoes\\package.json' + : 'tomatoes/package.json'; + const rootWorkspace = + process.platform === 'win32' + ? '\\yarn-workspaces\\package.json' + : 'yarn-workspaces/package.json'; + + params.server.popRequests(6).forEach((req) => { + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } else if ( - req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) - ) { - t.notOk(req.body.policy, 'body does not contain policy'); - } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { - t.match( - req.body.policy, - 'npm:node-uuid:20111130', - 'policy is as expected', + t.match(req.url, '/api/v1/test-dep-graph', 'posts to correct url'); + t.ok(req.body.depGraph, 'body contains depGraph'); + if (req.body.targetFileRelativePath.endsWith(applesWorkspace)) { + t.match( + req.body.policy, + 'npm:node-uuid:20160328', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } else if ( + req.body.targetFileRelativePath.endsWith(tomatoesWorkspace) + ) { + t.notOk(req.body.policy, 'body does not contain policy'); + } else if (req.body.targetFileRelativePath.endsWith(rootWorkspace)) { + t.match( + req.body.policy, + 
'npm:node-uuid:20111130', + 'policy is as expected', + ); + t.ok(req.body.policy, 'body contains policy'); + policyCount += 1; + } + t.equal( + req.body.depGraph.pkgManager.name, + 'yarn', + 'depGraph has package manager', ); - t.ok(req.body.policy, 'body contains policy'); - policyCount += 1; - } - t.equal( - req.body.depGraph.pkgManager.name, - 'yarn', - 'depGraph has package manager', - ); - }); - t.equal(policyCount, 2, '2 policies found in a workspace'); - }, + }); + t.equal(policyCount, 2, '2 policies found in a workspace'); + }, }, }; diff --git a/test/tap/cli-test/cli-test.yarn.spec.ts b/test/tap/cli-test/cli-test.yarn.spec.ts index c0126fcc43..19ac47360e 100644 --- a/test/tap/cli-test/cli-test.yarn.spec.ts +++ b/test/tap/cli-test/cli-test.yarn.spec.ts @@ -4,318 +4,314 @@ export const YarnTests: AcceptanceTests = { language: 'Yarn', tests: { // yarn lockfile based testing is only supported for node 4+ - '`test yarn-out-of-sync` out of sync fails': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces(); - try { - await params.cli.test('yarn-out-of-sync', { dev: true }); - t.fail('Should fail'); - } catch (e) { - t.equal( - e.message, - '\nTesting yarn-out-of-sync...\n\n' + - 'Dependency snyk@* was not found in yarn.lock.' + - ' Your package.json and yarn.lock are probably out of sync.' + - ' Please run "yarn install" and try again.', - 'Contains enough info about err', - ); - } - }, + '`test yarn-out-of-sync` out of sync fails': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + try { + await params.cli.test('yarn-out-of-sync', { dev: true }); + t.fail('Should fail'); + } catch (e) { + t.equal( + e.message, + '\nTesting yarn-out-of-sync...\n\n' + + 'Dependency snyk@* was not found in yarn.lock.' + + ' Your package.json and yarn.lock are probably out of sync.' 
+ + ' Please run "yarn install" and try again.', + 'Contains enough info about err', + ); + } + }, - '`test yarn-out-of-sync --strict-out-of-sync=false` passes': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('yarn-out-of-sync', { - dev: true, - strictOutOfSync: false, - }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'acorn-jsx@3.0.1', - 'acorn@3.3.0', - 'acorn@5.7.3', - 'ajv-keywords@2.1.1', - 'ajv@5.5.2', - 'ansi-escapes@3.1.0', - 'ansi-regex@2.1.1', - 'ansi-regex@3.0.0', - 'ansi-styles@2.2.1', - 'ansi-styles@3.2.1', - 'argparse@1.0.10', - 'array-union@1.0.2', - 'array-uniq@1.0.3', - 'arrify@1.0.1', - 'babel-code-frame@6.26.0', - 'balanced-match@1.0.0', - 'brace-expansion@1.1.11', - 'buffer-from@1.1.1', - 'caller-path@0.1.0', - 'callsites@0.2.0', - 'chalk@1.1.3', - 'chalk@2.4.1', - 'chardet@0.4.2', - 'circular-json@0.3.3', - 'cli-cursor@2.1.0', - 'cli-width@2.2.0', - 'co@4.6.0', - 'color-convert@1.9.3', - 'color-name@1.1.3', - 'concat-map@0.0.1', - 'concat-stream@1.6.2', - 'core-util-is@1.0.2', - 'cross-spawn@5.1.0', - 'debug@3.2.5', - 'deep-is@0.1.3', - 'del@2.2.2', - 'doctrine@2.1.0', - 'escape-string-regexp@1.0.5', - 'eslint-scope@3.7.3', - 'eslint-visitor-keys@1.0.0', - 'eslint@4.19.1', - 'espree@3.5.4', - 'esprima@4.0.1', - 'esquery@1.0.1', - 'esrecurse@4.2.1', - 'estraverse@4.2.0', - 'esutils@2.0.2', - 'external-editor@2.2.0', - 'fast-deep-equal@1.1.0', - 'fast-json-stable-stringify@2.0.0', - 'fast-levenshtein@2.0.6', - 'figures@2.0.0', - 'file-entry-cache@2.0.0', - 'flat-cache@1.3.0', - 'fs.realpath@1.0.0', - 'functional-red-black-tree@1.0.1', - 'glob@7.1.3', - 'globals@11.7.0', - 'globby@5.0.0', - 'graceful-fs@4.1.11', - 'has-ansi@2.0.0', - 'has-flag@3.0.0', - 'iconv-lite@0.4.24', - 'ignore@3.3.10', - 'imurmurhash@0.1.4', - 'inflight@1.0.6', - 
'inherits@2.0.3', - 'inquirer@3.3.0', - 'is-fullwidth-code-point@2.0.0', - 'is-path-cwd@1.0.0', - 'is-path-in-cwd@1.0.1', - 'is-path-inside@1.0.1', - 'is-promise@2.1.0', - 'is-resolvable@1.1.0', - 'isarray@1.0.0', - 'isexe@2.0.0', - 'js-tokens@3.0.2', - 'js-yaml@3.12.0', - 'json-schema-traverse@0.3.1', - 'json-stable-stringify-without-jsonify@1.0.1', - 'levn@0.3.0', - 'lodash@4.17.11', - 'lru-cache@4.1.3', - 'mimic-fn@1.2.0', - 'minimatch@3.0.4', - 'minimist@0.0.8', - 'mkdirp@0.5.1', - 'ms@2.1.1', - 'mute-stream@0.0.7', - 'natural-compare@1.4.0', - 'npm-package@1.0.0', - 'object-assign@4.1.1', - 'once@1.4.0', - 'onetime@2.0.1', - 'optionator@0.8.2', - 'os-tmpdir@1.0.2', - 'path-is-absolute@1.0.1', - 'path-is-inside@1.0.2', - 'pify@2.3.0', - 'pinkie-promise@2.0.1', - 'pinkie@2.0.4', - 'pluralize@7.0.0', - 'prelude-ls@1.1.2', - 'process-nextick-args@2.0.0', - 'progress@2.0.0', - 'pseudomap@1.0.2', - 'readable-stream@2.3.6', - 'regexpp@1.1.0', - 'require-uncached@1.0.3', - 'resolve-from@1.0.1', - 'restore-cursor@2.0.0', - 'rewire@4.0.1', - 'rimraf@2.6.2', - 'run-async@2.3.0', - 'rx-lite-aggregates@4.0.8', - 'rx-lite@4.0.8', - 'safe-buffer@5.1.2', - 'safer-buffer@2.1.2', - 'semver@5.5.1', - 'shebang-command@1.2.0', - 'shebang-regex@1.0.0', - 'signal-exit@3.0.2', - 'slice-ansi@1.0.0', - 'snyk@*', - 'sprintf-js@1.0.3', - 'string-width@2.1.1', - 'string_decoder@1.1.1', - 'strip-ansi@3.0.1', - 'strip-ansi@4.0.0', - 'strip-json-comments@2.0.1', - 'supports-color@2.0.0', - 'supports-color@5.5.0', - 'table@4.0.2', - 'text-table@0.2.0', - 'through@2.3.8', - 'tmp@0.0.33', - 'to-array@0.1.4', - 'type-check@0.3.2', - 'typedarray@0.0.6', - 'util-deprecate@1.0.2', - 'which@1.3.1', - 'wordwrap@1.0.0', - 'wrappy@1.0.2', - 'write@0.2.1', - 'yallist@2.1.2', - ].sort(), - 'depGraph looks fine', - ); - }, - '`test yarn-package --file=yarn-package/yarn.lock ` sends pkg info & policy': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test({ file: 
'yarn-package/yarn.lock' }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.policy, 'npm:debug:20170905', 'policy is found & sent'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), - 'depGraph looks fine', - ); - }, - '`test yarn-package --file=yarn.lock ` sends pkg info & policy': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('yarn-package', { file: 'yarn.lock' }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.policy, 'npm:debug:20170905', 'policy is found & sent'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), - 'depGraph looks fine', - ); - }, - '`test yarn-package` sends pkg info & policy': (params, utils) => async ( - t, - ) => { - utils.chdirWorkspaces('yarn-package'); - await params.cli.test(); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.policy, 'npm:debug:20170905', 'policy is found & sent'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), - 'depGraph looks fine', - ); - }, + '`test yarn-out-of-sync --strict-out-of-sync=false` passes': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('yarn-out-of-sync', { + dev: true, + strictOutOfSync: false, + }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); 
+ const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'acorn-jsx@3.0.1', + 'acorn@3.3.0', + 'acorn@5.7.3', + 'ajv-keywords@2.1.1', + 'ajv@5.5.2', + 'ansi-escapes@3.1.0', + 'ansi-regex@2.1.1', + 'ansi-regex@3.0.0', + 'ansi-styles@2.2.1', + 'ansi-styles@3.2.1', + 'argparse@1.0.10', + 'array-union@1.0.2', + 'array-uniq@1.0.3', + 'arrify@1.0.1', + 'babel-code-frame@6.26.0', + 'balanced-match@1.0.0', + 'brace-expansion@1.1.11', + 'buffer-from@1.1.1', + 'caller-path@0.1.0', + 'callsites@0.2.0', + 'chalk@1.1.3', + 'chalk@2.4.1', + 'chardet@0.4.2', + 'circular-json@0.3.3', + 'cli-cursor@2.1.0', + 'cli-width@2.2.0', + 'co@4.6.0', + 'color-convert@1.9.3', + 'color-name@1.1.3', + 'concat-map@0.0.1', + 'concat-stream@1.6.2', + 'core-util-is@1.0.2', + 'cross-spawn@5.1.0', + 'debug@3.2.5', + 'deep-is@0.1.3', + 'del@2.2.2', + 'doctrine@2.1.0', + 'escape-string-regexp@1.0.5', + 'eslint-scope@3.7.3', + 'eslint-visitor-keys@1.0.0', + 'eslint@4.19.1', + 'espree@3.5.4', + 'esprima@4.0.1', + 'esquery@1.0.1', + 'esrecurse@4.2.1', + 'estraverse@4.2.0', + 'esutils@2.0.2', + 'external-editor@2.2.0', + 'fast-deep-equal@1.1.0', + 'fast-json-stable-stringify@2.0.0', + 'fast-levenshtein@2.0.6', + 'figures@2.0.0', + 'file-entry-cache@2.0.0', + 'flat-cache@1.3.0', + 'fs.realpath@1.0.0', + 'functional-red-black-tree@1.0.1', + 'glob@7.1.3', + 'globals@11.7.0', + 'globby@5.0.0', + 'graceful-fs@4.1.11', + 'has-ansi@2.0.0', + 'has-flag@3.0.0', + 'iconv-lite@0.4.24', + 'ignore@3.3.10', + 'imurmurhash@0.1.4', + 'inflight@1.0.6', + 'inherits@2.0.3', + 'inquirer@3.3.0', + 'is-fullwidth-code-point@2.0.0', + 'is-path-cwd@1.0.0', + 'is-path-in-cwd@1.0.1', + 'is-path-inside@1.0.1', + 'is-promise@2.1.0', + 'is-resolvable@1.1.0', + 'isarray@1.0.0', + 'isexe@2.0.0', + 'js-tokens@3.0.2', + 'js-yaml@3.12.0', + 'json-schema-traverse@0.3.1', + 'json-stable-stringify-without-jsonify@1.0.1', + 'levn@0.3.0', + 'lodash@4.17.11', + 'lru-cache@4.1.3', + 'mimic-fn@1.2.0', + 
'minimatch@3.0.4', + 'minimist@0.0.8', + 'mkdirp@0.5.1', + 'ms@2.1.1', + 'mute-stream@0.0.7', + 'natural-compare@1.4.0', + 'npm-package@1.0.0', + 'object-assign@4.1.1', + 'once@1.4.0', + 'onetime@2.0.1', + 'optionator@0.8.2', + 'os-tmpdir@1.0.2', + 'path-is-absolute@1.0.1', + 'path-is-inside@1.0.2', + 'pify@2.3.0', + 'pinkie-promise@2.0.1', + 'pinkie@2.0.4', + 'pluralize@7.0.0', + 'prelude-ls@1.1.2', + 'process-nextick-args@2.0.0', + 'progress@2.0.0', + 'pseudomap@1.0.2', + 'readable-stream@2.3.6', + 'regexpp@1.1.0', + 'require-uncached@1.0.3', + 'resolve-from@1.0.1', + 'restore-cursor@2.0.0', + 'rewire@4.0.1', + 'rimraf@2.6.2', + 'run-async@2.3.0', + 'rx-lite-aggregates@4.0.8', + 'rx-lite@4.0.8', + 'safe-buffer@5.1.2', + 'safer-buffer@2.1.2', + 'semver@5.5.1', + 'shebang-command@1.2.0', + 'shebang-regex@1.0.0', + 'signal-exit@3.0.2', + 'slice-ansi@1.0.0', + 'snyk@*', + 'sprintf-js@1.0.3', + 'string-width@2.1.1', + 'string_decoder@1.1.1', + 'strip-ansi@3.0.1', + 'strip-ansi@4.0.0', + 'strip-json-comments@2.0.1', + 'supports-color@2.0.0', + 'supports-color@5.5.0', + 'table@4.0.2', + 'text-table@0.2.0', + 'through@2.3.8', + 'tmp@0.0.33', + 'to-array@0.1.4', + 'type-check@0.3.2', + 'typedarray@0.0.6', + 'util-deprecate@1.0.2', + 'which@1.3.1', + 'wordwrap@1.0.0', + 'wrappy@1.0.2', + 'write@0.2.1', + 'yallist@2.1.2', + ].sort(), + 'depGraph looks fine', + ); + }, + '`test yarn-package --file=yarn-package/yarn.lock ` sends pkg info & policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test({ file: 'yarn-package/yarn.lock' }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match( + req.body.policy, + 'npm:debug:20170905', + 'policy is found & sent', + ); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), + 
'depGraph looks fine', + ); + }, + '`test yarn-package --file=yarn.lock ` sends pkg info & policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('yarn-package', { file: 'yarn.lock' }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match( + req.body.policy, + 'npm:debug:20170905', + 'policy is found & sent', + ); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), + 'depGraph looks fine', + ); + }, + '`test yarn-package` sends pkg info & policy': + (params, utils) => async (t) => { + utils.chdirWorkspaces('yarn-package'); + await params.cli.test(); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match( + req.body.policy, + 'npm:debug:20170905', + 'policy is found & sent', + ); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['npm-package@1.0.0', 'ms@0.7.1', 'debug@2.2.0'].sort(), + 'depGraph looks fine', + ); + }, - '`test yarn-package --file=yarn.lock --dev` sends pkg info': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('yarn-package', { file: 'yarn.lock', dev: true }); - const req = params.server.popRequest(); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - [ - 'npm-package@1.0.0', - 'ms@0.7.1', - 'debug@2.2.0', - 'object-assign@4.1.1', - ].sort(), - 'depGraph looks fine', - ); - }, + '`test yarn-package --file=yarn.lock --dev` sends pkg info': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await 
params.cli.test('yarn-package', { file: 'yarn.lock', dev: true }); + const req = params.server.popRequest(); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + [ + 'npm-package@1.0.0', + 'ms@0.7.1', + 'debug@2.2.0', + 'object-assign@4.1.1', + ].sort(), + 'depGraph looks fine', + ); + }, - '`test yarn-package-with-subfolder --file=yarn.lock ` picks top-level files': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('yarn-package-with-subfolder', { - file: 'yarn.lock', - }); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['yarn-package-top-level@1.0.0', 'to-array@0.1.4'].sort(), - 'depGraph looks fine', - ); - }, + '`test yarn-package-with-subfolder --file=yarn.lock ` picks top-level files': + (params, utils) => async (t) => { + utils.chdirWorkspaces(); + await params.cli.test('yarn-package-with-subfolder', { + file: 'yarn.lock', + }); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['yarn-package-top-level@1.0.0', 'to-array@0.1.4'].sort(), + 'depGraph looks fine', + ); + }, - '`test yarn-package-with-subfolder --file=subfolder/yarn.lock` picks subfolder files': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces(); - await params.cli.test('yarn-package-with-subfolder', { - file: 'subfolder/yarn.lock', - }); - const req = params.server.popRequest(); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['yarn-package-subfolder@1.0.0', 'to-array@0.1.4'].sort(), - 'depGraph looks fine', - ); - }, + '`test yarn-package-with-subfolder --file=subfolder/yarn.lock` picks subfolder files': + (params, utils) => async (t) => { + 
utils.chdirWorkspaces(); + await params.cli.test('yarn-package-with-subfolder', { + file: 'subfolder/yarn.lock', + }); + const req = params.server.popRequest(); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['yarn-package-subfolder@1.0.0', 'to-array@0.1.4'].sort(), + 'depGraph looks fine', + ); + }, - '`test` on a yarn package does work and displays appropriate text': ( - params, - utils, - ) => async (t) => { - utils.chdirWorkspaces('yarn-app'); - await params.cli.test(); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['yarn-app-one@1.0.0', 'marked@0.3.6', 'moment@2.18.1'].sort(), - 'depGraph looks fine', - ); - }, + '`test` on a yarn package does work and displays appropriate text': + (params, utils) => async (t) => { + utils.chdirWorkspaces('yarn-app'); + await params.cli.test(); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['yarn-app-one@1.0.0', 'marked@0.3.6', 'moment@2.18.1'].sort(), + 'depGraph looks fine', + ); + }, '`test` on a yarn v2 package': (params, utils) => async (t) => { utils.chdirWorkspaces('yarn-v2'); await params.cli.test(); @@ -336,27 +332,25 @@ export const YarnTests: AcceptanceTests = { ); }, - '`test` on a yarn lock v2 package - uses yarn v3': ( - params, - utils, - 
) => async (t) => { - utils.chdirWorkspaces('yarn-lock-v2-vuln'); - await params.cli.test(); - const req = params.server.popRequest(); - t.equal(req.method, 'POST', 'makes POST request'); - t.equal( - req.headers['x-snyk-cli-version'], - params.versionNumber, - 'sends version number', - ); - t.match(req.url, '/test-dep-graph', 'posts to correct url'); - t.match(req.body.targetFile, undefined, 'target is undefined'); - const depGraph = req.body.depGraph; - t.same( - depGraph.pkgs.map((p) => p.id).sort(), - ['yarn-3-vuln@1.0.0', 'lodash@4.17.0'].sort(), - 'depGraph looks fine', - ); - }, + '`test` on a yarn lock v2 package - uses yarn v3': + (params, utils) => async (t) => { + utils.chdirWorkspaces('yarn-lock-v2-vuln'); + await params.cli.test(); + const req = params.server.popRequest(); + t.equal(req.method, 'POST', 'makes POST request'); + t.equal( + req.headers['x-snyk-cli-version'], + params.versionNumber, + 'sends version number', + ); + t.match(req.url, '/test-dep-graph', 'posts to correct url'); + t.match(req.body.targetFile, undefined, 'target is undefined'); + const depGraph = req.body.depGraph; + t.same( + depGraph.pkgs.map((p) => p.id).sort(), + ['yarn-3-vuln@1.0.0', 'lodash@4.17.0'].sort(), + 'depGraph looks fine', + ); + }, }, }; diff --git a/test/tap/cli-test/sarif-schema-2.1.0.js b/test/tap/cli-test/sarif-schema-2.1.0.js index 071247cd17..9173284ef6 100644 --- a/test/tap/cli-test/sarif-schema-2.1.0.js +++ b/test/tap/cli-test/sarif-schema-2.1.0.js @@ -2,8 +2,7 @@ module.exports = { $schema: 'http://json-schema.org/draft-07/schema#', title: 'Static Analysis Results Format (SARIF) Version 2.1.0 JSON Schema', - $id: - 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json', + $id: 'https://raw.githubusercontent.com/oasis-tcs/sarif-spec/master/Schemata/sarif-schema-2.1.0.json', description: 'Static Analysis Results Format (SARIF) Version 2.1.0 JSON Schema: a standard format for the output of static analysis tools.', 
additionalProperties: false, diff --git a/test/tap/container.test.ts b/test/tap/container.test.ts index 20b93423c3..de8ca10ad9 100644 --- a/test/tap/container.test.ts +++ b/test/tap/container.test.ts @@ -59,13 +59,15 @@ const stubMeta: MonitorMeta = { test('isContainer returns false if image name is undefined', (t) => { t.plan(1); - const scannedProject: ScannedProject = stubScannedProjectContainerWithNoImageName(); + const scannedProject: ScannedProject = + stubScannedProjectContainerWithNoImageName(); t.notOk(container.isContainer(scannedProject)); }); test('isContainer returns false if image name is empty', (t) => { t.plan(1); - const scannedProject: ScannedProject = stubScannedProjectContainerWithEmptyImageName(); + const scannedProject: ScannedProject = + stubScannedProjectContainerWithEmptyImageName(); t.notOk(container.isContainer(scannedProject)); }); diff --git a/test/tap/find-files.test.ts b/test/tap/find-files.test.ts index c2d2410631..131f4629cd 100644 --- a/test/tap/find-files.test.ts +++ b/test/tap/find-files.test.ts @@ -2,9 +2,13 @@ import * as path from 'path'; import { test } from 'tap'; import { find } from '../../src/lib/find-files'; import { getFixturePath } from '../jest/util/getFixturePath'; +import { getWorkspacePath } from '../jest/util/getWorkspacePath'; const testFixture = getFixturePath('find-files'); +// eslint-disable-next-line @typescript-eslint/no-unused-vars +const skiptest = (name, _) => console.log(`Skipping ${name}`); + test('find all files in test fixture', async (t) => { // six levels deep to find all const { files: result, allFilesFound } = await find({ @@ -71,6 +75,26 @@ test('find all files in test fixture', async (t) => { ); }); +test('defaults to only detecting files up to 4 layers deep when undefined', async (t) => { + // + const { files: result } = await find({ + path: getWorkspacePath('mono-repo-nested'), + levelsDeep: undefined, + }); + + t.same(result.length, 4); +}); + +test('defaults to only detecting files up to 4 
layers deep when NaN', async (t) => { + // + const { files: result } = await find({ + path: getWorkspacePath('mono-repo-nested'), + levelsDeep: NaN, + }); + + t.same(result.length, 4); +}); + test('find all files in test fixture ignoring node_modules', async (t) => { // six levels deep to ensure node_modules is tested const { files: result } = await find({ diff --git a/test/tap/proxy.test.js b/test/tap/proxy.test.js index e133444c53..ddf060f42a 100644 --- a/test/tap/proxy.test.js +++ b/test/tap/proxy.test.js @@ -31,7 +31,7 @@ test('request respects proxy environment variables', async (t) => { }); process.env.http_proxy = `http://localhost:${proxyPort}`; - const proxy = http.createServer(function(req, res) { + const proxy = http.createServer(function (req, res) { t.equal(req.url, httpRequestHost + requestPath, 'http_proxy url ok'); res.end(); }); @@ -58,7 +58,7 @@ test('request respects proxy environment variables', async (t) => { }); process.env.HTTP_PROXY = `http://localhost:${proxyPort}`; - const proxy = http.createServer(function(req, res) { + const proxy = http.createServer(function (req, res) { t.equal(req.url, httpRequestHost + requestPath, 'HTTP_PROXY url ok'); res.end(); }); @@ -107,7 +107,7 @@ test('request respects proxy environment variables', async (t) => { 'Proxy-agent: Node.js-Proxy\r\n' + 'Connection: close\r\n' + '\r\n', - function() { + function () { cltSocket.end(); }, ); @@ -159,7 +159,7 @@ test('request respects proxy environment variables', async (t) => { 'Proxy-agent: Node.js-Proxy\r\n' + 'Connection: close\r\n' + '\r\n', - function() { + function () { cltSocket.end(); }, ); diff --git a/test/tap/remote-package.test.ts b/test/tap/remote-package.test.ts index 23650e6b3b..54dbcd2659 100644 --- a/test/tap/remote-package.test.ts +++ b/test/tap/remote-package.test.ts @@ -79,10 +79,7 @@ test('multiple test arguments', async (t) => { try { const commandResult: CommandResult = await cli.test('semver@4', 'qs@6'); const res = 
commandResult.getDisplayResults(); - const lastLine = res - .trim() - .split('\n') - .pop(); + const lastLine = res.trim().split('\n').pop(); t.equal( lastLine, 'Tested 2 projects, no vulnerable paths were found.', @@ -97,10 +94,7 @@ test('multiple test arguments', async (t) => { t.fail(res); } catch (error) { const res = error.message; - const lastLine = res - .trim() - .split('\n') - .pop(); + const lastLine = res.trim().split('\n').pop(); t.equal( lastLine, 'Tested 2 projects, 1 contained vulnerable paths.', @@ -113,10 +107,7 @@ test('multiple test arguments', async (t) => { t.fail(res); } catch (error) { const res = error.message; - const lastLine = res - .trim() - .split('\n') - .pop(); + const lastLine = res.trim().split('\n').pop(); t.equal( lastLine, 'Tested 2 projects, 1 contained vulnerable paths.', @@ -129,10 +120,7 @@ test('multiple test arguments', async (t) => { t.fail(res); } catch (error) { const res = error.message; - const lastLine = res - .trim() - .split('\n') - .pop(); + const lastLine = res.trim().split('\n').pop(); t.equal( lastLine, 'Tested 2 projects, 2 contained vulnerable paths.', @@ -147,10 +135,7 @@ test('test for existing remote package with dev-deps only with --dev', async (t) dev: true, }); const res = commandResult.getDisplayResults(); - const lastLine = res - .trim() - .split('\n') - .pop(); + const lastLine = res.trim().split('\n').pop(); t.same( lastLine, '✔ Tested lodash@4.17.11 for known vulnerabilities, no vulnerable paths found.', @@ -171,10 +156,7 @@ test('test for existing remote package with dev-deps only', async (t) => { dev: false, }); const res = commandResult.getDisplayResults(); - const lastLine = res - .trim() - .split('\n') - .pop(); + const lastLine = res.trim().split('\n').pop(); t.same( lastLine, @@ -193,10 +175,7 @@ test('test for non-existing', async (t) => { t.fail('should fail, instead received ' + res); } catch (error) { const res = error.message; - const lastLine = res - .trim() - .split('\n') - .pop(); + 
const lastLine = res.trim().split('\n').pop(); t.same( lastLine, 'Internal server error', diff --git a/test/tap/run-test.test.ts b/test/tap/run-test.test.ts index 83f52e705e..5ab808ca80 100644 --- a/test/tap/run-test.test.ts +++ b/test/tap/run-test.test.ts @@ -40,9 +40,9 @@ before('setup', async (t) => { }); test('runTest annotates results with remediation data when using node_modules', async (t) => { - const vulns = require(getFixturePath( - 'npm-package-with-git-url/test-graph-result.json', - )); + const vulns = require( + getFixturePath('npm-package-with-git-url/test-graph-result.json'), + ); server.setNextResponse(vulns); const result = await runTest( diff --git a/test/tap/sub-process.test.js b/test/tap/sub-process.test.js index 7775d8b9f8..e521c4a788 100644 --- a/test/tap/sub-process.test.js +++ b/test/tap/sub-process.test.js @@ -43,56 +43,56 @@ function isSupported() { } } -test('sub-process.execute executes sub processes', function(t) { +test('sub-process.execute executes sub processes', function (t) { if (isSupported()) { - t.test('runs in shell', function(t) { + t.test('runs in shell', function (t) { t.plan(1); subProcess .execute('echo', [shellVar]) - .then(function(result) { + .then(function (result) { t.not(result.trim(), shellVar, 'evaluates shell variable'); }) .catch(t.fail); }); } - t.test('successful execution', function(t) { + t.test('successful execution', function (t) { t.plan(2); subProcess .execute(script('stdout-echo'), ['hello world']) - .then(function(result) { + .then(function (result) { t.match(result, 'hello world', 'should resolve with stdout'); }) .catch(t.fail); subProcess .execute(script('stderr-echo'), ['hello error']) - .then(function(result) { + .then(function (result) { t.match(result, 'hello error', 'should resolve with stderr'); }) .catch(t.fail); }); - t.test('error during execution', function(t) { + t.test('error during execution', function (t) { t.plan(2); subProcess .execute(script('stdout-echo-fail'), ['hello world']) - 
.then(function() { + .then(function () { t.fail('should not have resolved'); }) - .catch(function(err) { + .catch(function (err) { t.match(err, 'hello world', 'should reject with standard output'); }); subProcess .execute(script('stderr-echo-fail'), ['hello error']) - .then(function() { + .then(function () { t.fail('should not have resolved'); }) - .catch(function(err) { + .catch(function (err) { t.match( err, 'hello error', @@ -101,14 +101,14 @@ test('sub-process.execute executes sub processes', function(t) { }); }); - t.test('options', function(t) { - t.test('options.cwd', function(t) { + t.test('options', function (t) { + t.test('options.cwd', function (t) { t.plan(2); const explicitWorkDir = path.resolve(path.join(__dirname, 'support')); subProcess .execute(script('pwd'), [], { cwd: explicitWorkDir }) - .then(function(result) { + .then(function (result) { t.match(result, explicitWorkDir, 'specifies the working directory'); }) .catch(t.fail); @@ -116,7 +116,7 @@ test('sub-process.execute executes sub processes', function(t) { const currentWorkDir = process.cwd(); subProcess .execute(script('pwd'), []) - .then(function(result) { + .then(function (result) { t.match( result, currentWorkDir, diff --git a/test/tap/user-config.test.ts b/test/tap/user-config.test.ts index a12d1f0053..2dd2b02b8f 100644 --- a/test/tap/user-config.test.ts +++ b/test/tap/user-config.test.ts @@ -9,26 +9,26 @@ test('can unset config values', async (t) => { config('foo' as any).catch(t.pass); await config() - .then(function(v) { + .then(function (v) { before = v; return config('set', 'foo=10'); }) - .then(function(v) { + .then(function (v) { t.pass('value set ' + v); return config('get', 'foo'); }) - .then(function(value) { + .then(function (value) { t.equal(value, '10', 'got value from config'); return config('unset', 'foo'); }) - .then(function() { + .then(function () { return config(); }) - .then(function(all) { + .then(function (all) { t.equal(before, all, 'final config matches'); 
config('unset', 'bar'); }) - .catch(function(e) { + .catch(function (e) { t.fail(e); }); }); @@ -41,26 +41,26 @@ test('can set config values with = inside', async (t) => { config('foo' as any).catch(t.pass); await config() - .then(function(v) { + .then(function (v) { before = v; return config('set', 'foo=10='); }) - .then(function(v) { + .then(function (v) { t.pass('value set ' + v); return config('get', 'foo'); }) - .then(function(value) { + .then(function (value) { t.equal(value, '10=', 'got value from config'); return config('unset', 'foo'); }) - .then(function() { + .then(function () { return config(); }) - .then(function(all) { + .then(function (all) { t.equal(before, all, 'final config matches'); config('unset', 'bar'); }) - .catch(function(e) { + .catch(function (e) { t.fail(e); }); }); diff --git a/test/tap/vulnerable-path-output.js b/test/tap/vulnerable-path-output.js index 85da036ee2..6389389f37 100644 --- a/test/tap/vulnerable-path-output.js +++ b/test/tap/vulnerable-path-output.js @@ -7,32 +7,32 @@ const cli = require('../cli/commands'); const snyk = require('..'); const { getFixturePath } = require('../jest/util/getFixturePath'); -sinon.stub(snyk, 'test', function() { +sinon.stub(snyk, 'test', function () { return require(getFixturePath('more-vuln-paths-than-vulns')); }); -tap.tearDown(function() { +tap.tearDown(function () { snyk.test.restore(); }); -test('"snyk test --show-vulnerable-paths=false"', function(t) { +test('"snyk test --show-vulnerable-paths=false"', function (t) { const options = { 'show-vulnerable-paths': 'false' }; return cli .test('more-vuln-paths-than-vulns', options) - .then(function() { + .then(function () { t.fail('Should have found vulns!'); }) - .catch(function(res) { + .catch(function (res) { const vulnUrls = res.message .match(/^- info: (.*)$/gm) - .map(function(result) { + .map(function (result) { return result.replace(/^- info:\s*/, ''); }); t.assert( _(vulnUrls) .countBy() // count the occurrances of each vulnUrl .values() 
- .every(function(occurances) { + .every(function (occurances) { return occurances === 1; }), 'displays each vuln only once', @@ -49,23 +49,23 @@ test('"snyk test --show-vulnerable-paths=false"', function(t) { }); }); -test('"snyk test"', function(t) { +test('"snyk test"', function (t) { return cli .test('more-vuln-paths-than-vulns') - .then(function() { + .then(function () { t.fail('Should have found vulns!'); }) - .catch(function(res) { + .catch(function (res) { const vulnUrls = res.message .match(/^- info: (.*)$/gm) - .map(function(result) { + .map(function (result) { return result.replace(/^- info:\s*/, ''); }); t.assert( _(vulnUrls) .countBy() // count the occurrances of each vulnUrl .values() - .some(function(occurances) { + .some(function (occurances) { return occurances > 1; }), 'duplicates vuln data for each vulnerable-path', diff --git a/ts-binary-wrapper/src/common.ts b/ts-binary-wrapper/src/common.ts index 46ef326bcd..13ef64c9e3 100644 --- a/ts-binary-wrapper/src/common.ts +++ b/ts-binary-wrapper/src/common.ts @@ -82,10 +82,7 @@ export function determineBinaryName(platform: string, arch: string): string { let isAlpine = false; try { const result = spawnSync('cat /etc/os-release', { shell: true }); - isAlpine = result.stdout - .toString() - .toLowerCase() - .includes('id=alpine'); + isAlpine = result.stdout.toString().toLowerCase().includes('id=alpine'); } catch { isAlpine = false; } @@ -260,7 +257,7 @@ export function downloadExecutable( filename: string, filenameShasum: string, ): Promise { - return new Promise(function(resolve) { + return new Promise(function (resolve) { logErrorWithTimeStamps('Starting download'); const options = new URL(`${downloadUrl}?utm_source=${integrationName}`); const temp = path.join(__dirname, Date.now().toString()); @@ -384,8 +381,7 @@ export async function logError( // init error reporting const version = getCurrentVersion(versionFile); Sentry.init({ - dsn: - 
'https://3e845233db8c4f43b4c4b9245f1d7bd6@o30291.ingest.sentry.io/4504599528079360', + dsn: 'https://3e845233db8c4f43b4c4b9245f1d7bd6@o30291.ingest.sentry.io/4504599528079360', release: version, }); diff --git a/ts-binary-wrapper/test/acceptance/basic.spec.ts b/ts-binary-wrapper/test/acceptance/basic.spec.ts index 1f4431c1b4..4e3d915ac6 100644 --- a/ts-binary-wrapper/test/acceptance/basic.spec.ts +++ b/ts-binary-wrapper/test/acceptance/basic.spec.ts @@ -9,7 +9,7 @@ jest.setTimeout(60 * 1000); describe('Basic acceptance test', () => { const envSetup = new TestEnvironmentSetup(); const cliVersionForTesting = - '1.1228.0-dev.b6d3a5aed7033dd2fe9fcc1330effeca0e4250b2'; + '1.1294.0-dev.fb7d0fdb0ab3beb8af7142c84ded754b568ba2f4'; beforeEach(async () => { process.env.SNYK_DISABLE_ANALYTICS = '1'; @@ -105,12 +105,9 @@ describe('Basic acceptance test', () => { expect(resultIndex.status).toEqual(0); // The binary wrapper should not output anything to stdout // Assert the only stdout is from the CLI --version flag - expect( - resultIndex.stdout - .toString() - .split(' ')[0] - .trim(), - ).toEqual(cliVersionForTesting); + expect(resultIndex.stdout.toString().split(' ')[0].trim()).toEqual( + cliVersionForTesting, + ); fs.unlinkSync(executable); }); diff --git a/ts-binary-wrapper/test/util/prepareEnvironment.ts b/ts-binary-wrapper/test/util/prepareEnvironment.ts index 65e45ff594..5b190bd4fd 100644 --- a/ts-binary-wrapper/test/util/prepareEnvironment.ts +++ b/ts-binary-wrapper/test/util/prepareEnvironment.ts @@ -69,7 +69,7 @@ export class TestEnvironmentSetup { private async downloadShaSum(version: string, shasumFile: string) { await common.downloadExecutable( - 'https://static.snyk.io/cli/v' + version + '/sha256sums.txt.asc', + 'https://downloads.snyk.io/cli/v' + version + '/sha256sums.txt.asc', shasumFile, '', ); @@ -77,7 +77,7 @@ export class TestEnvironmentSetup { } if (process.argv.includes('exec')) { - (async function() { + (async function () { const env = new 
TestEnvironmentSetup(); await env.prepareEnvironment('1.1292.1'); })();