diff --git a/development.md b/development.md
index 3a39a4c8..85d8f03f 100644
--- a/development.md
+++ b/development.md
@@ -4,14 +4,14 @@ To use and debug package locally you don't need publish it to NPM registry:
 
 ```shell script
 $ cd 
-$ npm install && npm run build && npx yalc publish
+$ npm install && npm run build && npm link
 ```
 
 After that you have to create symlink to your package in your project folder:
 
 ```shell script
 $ cd 
-$ npx yalc add @snyk/code-client
+$ npm link @snyk/code-client
 ```
 
 ## Publishing
diff --git a/package.json b/package.json
index 4d351f57..5e5b8dc2 100644
--- a/package.json
+++ b/package.json
@@ -58,8 +58,7 @@
     "prettier": "^2.1.1",
     "ts-jest": "^26.3.0",
     "typescript": "^4.0.2",
-    "write": "^2.0.0",
-    "yalc": "^1.0.0-pre.53"
+    "write": "^2.0.0"
   },
   "dependencies": {
     "@deepcode/dcignore": "^1.0.4",
diff --git a/src/analysis.ts b/src/analysis.ts
index cf9e3ea3..d263e79b 100644
--- a/src/analysis.ts
+++ b/src/analysis.ts
@@ -1,6 +1,8 @@
 /* eslint-disable no-await-in-loop */
+import omit from 'lodash.omit';
 
-import { AnalyzeFoldersOptions, prepareExtendingBundle } from './files';
+import { AnalyzeFoldersOptions, prepareExtendingBundle, resolveBundleFilePath } from './files';
+import { POLLING_INTERVAL } from './constants';
 import {
   GetAnalysisErrorCodes,
   getAnalysis,
@@ -12,9 +14,10 @@ import {
   ConnectionOptions,
   GetAnalysisOptions,
 } from './http';
+import { fromEntries } from './lib/utils';
 import { createBundleFromFolders, FileBundle, remoteBundleFactory } from './bundles';
-import emitter from './emitter';
-import { AnalysisResult } from './interfaces/analysis-result.interface';
+import { emitter } from './emitter';
+import { AnalysisResult, AnalysisResultLegacy, AnalysisResultSarif, AnalysisFiles, Suggestion } from './interfaces/analysis-result.interface';
 
 const sleep = (duration: number) => new Promise(resolve => setTimeout(resolve, duration));
@@ -56,7 +59,7 @@ async function pollAnalysis(
       return analysisResponse as Result;
     }
 
-    await sleep(500);
+    await sleep(POLLING_INTERVAL);
   }
 }
@@ -73,17 +76,17 @@ export async function analyzeBundle(options: GetAnalysisOptions): Promise {
-//         const filePath = resolveBundleFilePath(baseDir, path);
-//         return [filePath, positions];
-//       }),
-//     );
-//   }
-//   return files;
-// }
+function normalizeResultFiles(files: AnalysisFiles, baseDir: string): AnalysisFiles {
+  if (baseDir) {
+    return fromEntries(
+      Object.entries(files).map(([path, positions]) => {
+        const filePath = resolveBundleFilePath(baseDir, path);
+        return [filePath, positions];
+      }),
+    );
+  }
+  return files;
+}
 
 interface FileAnalysisOptions {
   connection: ConnectionOptions;
@@ -91,7 +94,7 @@ interface FileAnalysisOptions {
   fileOptions: AnalyzeFoldersOptions;
 }
 
-interface FileAnalysis extends FileAnalysisOptions {
+export interface FileAnalysis extends FileAnalysisOptions {
   fileBundle: FileBundle;
   analysisResults: AnalysisResult;
 }
@@ -109,18 +112,36 @@ export async function analyzeFolders(options: FileAnalysisOptions): Promise(
+  suggestionIndex: number,
+  suggestions: { [index: string]: T },
+): { [index: string]: T } => {
+  const entries = Object.entries(suggestions);
+  return fromEntries(
+    entries.map(([i, s]) => {
+      return [`${parseInt(i, 10) + suggestionIndex + 1}`, s];
+    }),
+  );
+};
+
+function mergeLegacyResults(
+  oldAnalysisResults: AnalysisResultLegacy,
+  newAnalysisResults: AnalysisResultLegacy,
+  limitToFiles: string[],
+  removedFiles: string[] = [],
+  baseDir: string,
+): AnalysisResultLegacy {
+
+  // expand relative file names to absolute ones only for legacy results
+  newAnalysisResults.files = normalizeResultFiles(newAnalysisResults.files, baseDir);
+
+  // Determine max suggestion index in our data
+  const suggestionIndex = Math.max(...Object.keys(oldAnalysisResults.suggestions).map(i => parseInt(i, 10))) || -1;
+
+  // Addup all new suggestions' indexes
+  const newSuggestions = moveSuggestionIndexes(suggestionIndex, newAnalysisResults.suggestions);
+  const suggestions = { ...oldAnalysisResults.suggestions, ...newSuggestions };
+
+  const newFiles = fromEntries(
+    Object.entries(newAnalysisResults.files).map(([fn, s]) => {
+      return [fn, moveSuggestionIndexes(suggestionIndex, s)];
+    }),
+  );
+
+  // expand relative file names to absolute ones only for legacy results
+  const changedFiles = [...limitToFiles, ...removedFiles].map(path => resolveBundleFilePath(baseDir, path));
+
+  const files = {
+    ...omit(oldAnalysisResults.files, changedFiles),
+    ...newFiles,
+  };
+
+  return {
+    ...newAnalysisResults,
+    files,
+    suggestions,
+  };
+}
+
 interface ExtendAnalysisOptions extends FileAnalysis {
   files: string[];
 }
@@ -234,10 +306,7 @@ export async function extendAnalysis(options: ExtendAnalysisOptions): Promise
 = 200 && response.statusCode < 300);
   if (success) return { success, response };
-
+  // Try to avoid breaking requests due to temporary network errors
   if (
     attempts > 1 &&
     response.statusCode &&
     [
       ErrorCodes.serviceUnavailable,
diff --git a/tests/analysis.spec.ts b/tests/analysis.spec.ts
index 673045b8..889067d2 100644
--- a/tests/analysis.spec.ts
+++ b/tests/analysis.spec.ts
@@ -1,11 +1,11 @@
 import path from 'path';
 import jsonschema from 'jsonschema';
 
-import { analyzeFolders, extendAnalysis } from '../src/analysis';
+import { analyzeFolders, extendAnalysis, FileAnalysis } from '../src/analysis';
 import { uploadRemoteBundle } from '../src/bundles';
 import { baseURL, sessionToken, source, TEST_TIMEOUT } from './constants/base';
 import { sampleProjectPath, bundleFiles, bundleFilesFull, bundleExtender } from './constants/sample';
-import emitter from '../src/emitter';
+import { emitter } from '../src/emitter';
 import { AnalysisResponseProgress } from '../src/http';
 import { SupportedFiles } from '../src/interfaces/files.interface';
 import { AnalysisSeverity } from '../src/interfaces/analysis-options.interface';
@@ -66,6 +66,9 @@ describe('Functional test of analysis', () => {
     expect(bundle).toBeTruthy();
     if (!bundle) return; // TS trick
 
+    expect(bundle.analysisResults.type === 'sarif').toBeTruthy();
+    if (bundle.analysisResults.type !== 'sarif') return;
+
     expect(bundle.analysisResults.sarif.runs[0].tool.driver.rules?.length).toEqual(7);
     expect(bundle.analysisResults.sarif.runs[0].results?.length).toEqual(12);
     const sampleRes = bundle.analysisResults.sarif.runs[0].results!.find(
@@ -152,6 +155,28 @@ describe('Functional test of analysis', () => {
     TEST_TIMEOUT,
   );
 
+  it('analyze folder legacy json results', async () => {
+    const bundle = await analyzeFolders({
+      connection: { baseURL, sessionToken, source },
+      analysisOptions: { severity: AnalysisSeverity.info, prioritized: true, legacy: true },
+      fileOptions: {
+        paths: [sampleProjectPath],
+        symlinksEnabled: false,
+        maxPayload: 1000,
+        defaultFileIgnores: undefined,
+      },
+    });
+
+    expect(bundle).toBeTruthy();
+    if (!bundle) return; // TS trick
+
+    expect(bundle.analysisResults.type === 'legacy').toBeTruthy();
+    if (bundle.analysisResults.type !== 'legacy') return;
+
+    expect(Object.keys(bundle.analysisResults.files)).toHaveLength(5);
+    expect(Object.keys(bundle.analysisResults.suggestions)).toHaveLength(8);
+  });
+
   it('analyze folder - with sarif returned', async () => {
     const bundle = await analyzeFolders({
       connection: { baseURL, sessionToken, source },
@@ -167,6 +192,9 @@ describe('Functional test of analysis', () => {
     expect(bundle).toBeTruthy();
     if (!bundle) return; // TS trick
 
+    expect(bundle.analysisResults.type === 'sarif').toBeTruthy();
+    if (bundle.analysisResults.type !== 'sarif') return;
+
     const validationResult = jsonschema.validate(bundle.analysisResults.sarif, sarifSchema);
     expect(validationResult.errors.length).toEqual(0);
   });
@@ -189,7 +217,7 @@ describe('Functional test of analysis', () => {
   it(
     'extend folder analysis',
     async () => {
-      const bundle = await analyzeFolders({
+      const fileAnalysis = await analyzeFolders({
         connection: { baseURL, sessionToken, source },
         analysisOptions: {
           severity: 1,
@@ -201,8 +229,14 @@ describe('Functional test of analysis', () => {
         },
       });
 
-      expect(bundle).toBeTruthy();
-      if (!bundle) return; // TS trick
+      expect(fileAnalysis).toBeTruthy();
+      if (!fileAnalysis) return; // TS trick
+
+      expect(fileAnalysis.analysisResults.type === 'sarif').toBeTruthy();
+      if (fileAnalysis.analysisResults.type !== 'sarif') return;
+
+      expect(fileAnalysis.analysisResults.sarif.runs[0].tool.driver.rules).toHaveLength(7);
+      expect(fileAnalysis.analysisResults.sarif.runs[0].results).toHaveLength(12);
 
       const extender = await bundleExtender();
       type Awaited = T extends PromiseLike ? Awaited : T;
@@ -210,7 +244,7 @@ describe('Functional test of analysis', () => {
       try {
         await extender.exec();
         extendedBundle = await extendAnalysis({
-          ...bundle,
+          ...fileAnalysis,
           files: extender.files.all,
         });
       } catch (err) {
@@ -222,10 +256,15 @@ describe('Functional test of analysis', () => {
       expect(extendedBundle).toBeTruthy();
       if (!extendedBundle) return; // TS trick
 
-      expect(extendedBundle.analysisResults.sarif.runs[0].tool.driver.rules?.length).toEqual(5);
-      expect(extendedBundle.analysisResults.sarif.runs[0].results?.length).toEqual(10);
+      expect(extendedBundle.analysisResults.type === 'sarif').toBeTruthy();
+      if (extendedBundle.analysisResults.type !== 'sarif') return;
+
+      const sarifResults = extendedBundle.analysisResults.sarif;
+
+      expect(sarifResults.runs[0].tool.driver.rules).toHaveLength(5);
+      expect(sarifResults.runs[0].results).toHaveLength(10);
       const getRes = (path: string) =>
-        extendedBundle!.analysisResults.sarif.runs[0].results!.find(
+        sarifResults.runs[0].results!.find(
           res => res.locations?.[0].physicalLocation?.artifactLocation?.uri === path,
         );
@@ -238,11 +277,11 @@ describe('Functional test of analysis', () => {
       expect(sampleRes.ruleIndex).toBeDefined();
       if (!sampleRes.ruleIndex) return; // TS trick
       expect(sampleRes!.ruleId).toEqual(
-        extendedBundle.analysisResults.sarif.runs[0].tool.driver.rules![sampleRes!.ruleIndex!].id,
+        sarifResults.runs[0].tool.driver.rules![sampleRes!.ruleIndex!].id,
       );
 
-      expect(bundle.analysisResults.timing.analysis).toBeGreaterThanOrEqual(
-        bundle.analysisResults.timing.fetchingCode,
+      expect(extendedBundle.analysisResults.timing.analysis).toBeGreaterThanOrEqual(
+        extendedBundle.analysisResults.timing.fetchingCode,
       );
       expect(extendedBundle.analysisResults.timing.queue).toBeGreaterThanOrEqual(0);
       expect(new Set(extendedBundle.analysisResults.coverage)).toEqual(
diff --git a/tests/api.spec.ts b/tests/api.spec.ts
index 8f4d8304..a2d9c28a 100644
--- a/tests/api.spec.ts
+++ b/tests/api.spec.ts
@@ -274,7 +274,7 @@ describe('Requests to public API', () => {
       if (response.type === 'error') return;
       expect(response.value.status !== AnalysisStatus.failed).toBeTruthy();
 
-      if (response.value.status === AnalysisStatus.complete) {
+      if (response.value.status === AnalysisStatus.complete && response.value.type === 'sarif') {
         expect(response.value.sarif.runs[0].results).toHaveLength(12);
 
         expect(new Set(response.value.coverage)).toEqual(
@@ -318,6 +318,10 @@ describe('Requests to public API', () => {
       if (response.type === 'error') return;
      expect(response.value.status !== AnalysisStatus.failed).toBeTruthy();
     } while (response.value.status !== AnalysisStatus.complete);
+
+    expect(response.value.type === 'sarif').toBeTruthy();
+    if (response.value.type !== 'sarif') return;
+
     expect(response.value.sarif.runs[0].results).toHaveLength(8);
 
     // Get analysis results with severity 3
@@ -333,6 +337,10 @@ describe('Requests to public API', () => {
       if (response.type === 'error') return;
       expect(response.value.status !== AnalysisStatus.failed).toBeTruthy();
     } while (response.value.status !== AnalysisStatus.complete);
+
+    expect(response.value.type === 'sarif').toBeTruthy();
+    if (response.value.type !== 'sarif') return;
+
     expect(response.value.sarif.runs[0].results).toHaveLength(4);
   },
   TEST_TIMEOUT,
diff --git a/tests/constants/sample.ts b/tests/constants/sample.ts
index 4045f395..8f1a97f2 100644
--- a/tests/constants/sample.ts
+++ b/tests/constants/sample.ts
@@ -64,6 +64,7 @@ export const bundleExtender: () => Promise<{
   );
   const original = changedFiles.map(path => fBundle.find(f => f.filePath === path)?.content);
   if (original.some(c => !c)) throw new Error('Content not found. Impossible to restore');
+
   return {
     files: {
       removed: changedFilesNames[0],
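
For reviewers, a minimal usage sketch of the discriminated result type these tests exercise. The option shapes and the narrowing on `analysisResults.type` ('legacy' vs. 'sarif') are taken from the tests in this diff; the root import path, the placeholder connection values, and the `run` wrapper are assumptions for illustration, not part of the change:

```typescript
// Hypothetical consumer sketch — import path assumed; the tests above import
// analyzeFolders from '../src/analysis' inside the repo itself.
import { analyzeFolders } from '@snyk/code-client';

async function run(): Promise<void> {
  const fileAnalysis = await analyzeFolders({
    // Placeholder connection values — substitute real ones.
    connection: { baseURL: 'https://example.invalid', sessionToken: '<token>', source: 'my-tool' },
    // `legacy: true` requests the pre-SARIF result shape covered by the new test.
    analysisOptions: { severity: 1, legacy: true },
    fileOptions: {
      paths: ['/path/to/project'],
      symlinksEnabled: false,
      maxPayload: 1000,
      defaultFileIgnores: undefined,
    },
  });

  if (!fileAnalysis) return; // analyzeFolders can resolve to null, as the tests guard for

  // analysisResults is a discriminated union after this change: narrow on `type`
  // before touching format-specific fields, exactly as the updated tests do.
  if (fileAnalysis.analysisResults.type === 'legacy') {
    console.log('files with findings:', Object.keys(fileAnalysis.analysisResults.files).length);
    console.log('suggestions:', Object.keys(fileAnalysis.analysisResults.suggestions).length);
  } else if (fileAnalysis.analysisResults.type === 'sarif') {
    console.log('sarif results:', fileAnalysis.analysisResults.sarif.runs[0].results?.length);
  }
}

run().catch(console.error);
```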