From 0d6b683c5f2cb55b899229fe5cdbaf03221231a1 Mon Sep 17 00:00:00 2001 From: Arvyd Paeglit <arvyd.paeglit@snyk.io> Date: Mon, 27 Sep 2021 13:17:26 +0000 Subject: [PATCH] feat: added support for legacy results format and adjusted for VSC removed yalc and updated doc with recommendation to use npm link instead --- development.md | 4 +- package.json | 3 +- src/analysis.ts | 117 ++++++++++++++++---- src/bundles.ts | 2 +- src/constants.ts | 1 + src/emitter.ts | 3 +- src/http.ts | 5 +- src/index.ts | 24 ++-- src/interfaces/analysis-result.interface.ts | 75 ++++++++++++- src/needle.ts | 6 +- tests/analysis.spec.ts | 63 +++++++++-- tests/api.spec.ts | 10 +- tests/constants/sample.ts | 1 + 13 files changed, 254 insertions(+), 60 deletions(-) diff --git a/development.md b/development.md index 3a39a4c8..85d8f03f 100644 --- a/development.md +++ b/development.md @@ -4,14 +4,14 @@ To use and debug package locally you don't need publish it to NPM registry: ```shell script $ cd <package-location> -$ npm install && npm run build && npx yalc publish +$ npm install && npm run build && npm link ``` After that you have to create symlink to your package in your project folder: ```shell script $ cd <project-location> -$ npx yalc add @snyk/code-client +$ npm link @snyk/code-client ``` ## Publishing diff --git a/package.json b/package.json index 4d351f57..5e5b8dc2 100644 --- a/package.json +++ b/package.json @@ -58,8 +58,7 @@ "prettier": "^2.1.1", "ts-jest": "^26.3.0", "typescript": "^4.0.2", - "write": "^2.0.0", - "yalc": "^1.0.0-pre.53" + "write": "^2.0.0" }, "dependencies": { "@deepcode/dcignore": "^1.0.4", diff --git a/src/analysis.ts b/src/analysis.ts index cf9e3ea3..d263e79b 100644 --- a/src/analysis.ts +++ b/src/analysis.ts @@ -1,6 +1,8 @@ /* eslint-disable no-await-in-loop */ +import omit from 'lodash.omit'; -import { AnalyzeFoldersOptions, prepareExtendingBundle } from './files'; +import { AnalyzeFoldersOptions, prepareExtendingBundle, resolveBundleFilePath } from './files'; +import { POLLING_INTERVAL } from './constants'; import { GetAnalysisErrorCodes, getAnalysis, @@ -12,9 +14,10 @@ import { ConnectionOptions, GetAnalysisOptions, } from './http'; +import { fromEntries } from './lib/utils'; import { createBundleFromFolders, FileBundle, remoteBundleFactory } from './bundles'; -import emitter from './emitter'; -import { AnalysisResult } from './interfaces/analysis-result.interface'; +import { emitter } from './emitter'; +import { AnalysisResult, AnalysisResultLegacy, AnalysisResultSarif, AnalysisFiles, Suggestion } from './interfaces/analysis-result.interface'; const sleep = (duration: number) => new Promise(resolve => setTimeout(resolve, duration)); @@ -56,7 +59,7 @@ async function pollAnalysis( return analysisResponse as Result<AnalysisFailedResponse, GetAnalysisErrorCodes>; } - await sleep(500); + await sleep(POLLING_INTERVAL); } } @@ -73,17 +76,17 @@ export async function analyzeBundle(options: GetAnalysisOptions): Promise<Analys return analysisData.value; } -// function normalizeResultFiles(files: AnalysisFiles, baseDir: string): AnalysisFiles { -// if (baseDir) { -// return fromEntries( -// Object.entries(files).map(([path, positions]) => { -// const filePath = resolveBundleFilePath(baseDir, path); -// return [filePath, positions]; -// }), -// ); -// } -// return files; -// } +function normalizeResultFiles(files: AnalysisFiles, baseDir: string): AnalysisFiles { + if (baseDir) { + return fromEntries( + Object.entries(files).map(([path, positions]) => { + const filePath = 
resolveBundleFilePath(baseDir, path); + return [filePath, positions]; + }), + ); + } + return files; +} interface FileAnalysisOptions { connection: ConnectionOptions; @@ -91,7 +94,7 @@ interface FileAnalysisOptions { fileOptions: AnalyzeFoldersOptions; } -interface FileAnalysis extends FileAnalysisOptions { +export interface FileAnalysis extends FileAnalysisOptions { fileBundle: FileBundle; analysisResults: AnalysisResult; } @@ -109,18 +112,36 @@ export async function analyzeFolders(options: FileAnalysisOptions): Promise<File ...options.connection, ...options.analysisOptions, }); - // TODO: expand relative file names to absolute ones - // analysisResults.files = normalizeResultFiles(analysisData.analysisResults.files, baseDir); + + if (analysisResults.type === 'legacy') { + // expand relative file names to absolute ones only for legacy results + analysisResults.files = normalizeResultFiles(analysisResults.files, fileBundle.baseDir); + } return { fileBundle, analysisResults, ...options }; } -function mergeBundleResults( - oldAnalysisResults: AnalysisResult, +function mergeBundleResults(oldAnalysisResults: AnalysisResult, newAnalysisResults: AnalysisResult, limitToFiles: string[], removedFiles: string[] = [], + baseDir: string, ): AnalysisResult { + + if (newAnalysisResults.type == 'sarif') { + return mergeSarifResults(oldAnalysisResults as AnalysisResultSarif, newAnalysisResults, limitToFiles, removedFiles); + } + + return mergeLegacyResults(oldAnalysisResults as AnalysisResultLegacy, newAnalysisResults, limitToFiles, removedFiles, baseDir); + +} + +function mergeSarifResults( + oldAnalysisResults: AnalysisResultSarif, + newAnalysisResults: AnalysisResultSarif, + limitToFiles: string[], + removedFiles: string[] = [], +): AnalysisResultSarif { // Start from the new analysis results // For each finding of the old analysis, // if it's location is not part of the limitToFiles or removedFiles (removedFiles should also be checked against condeFlow), @@ -189,6 +210,57 @@ function mergeBundleResults( return newAnalysisResults; } +const moveSuggestionIndexes = <T>( + suggestionIndex: number, + suggestions: { [index: string]: T }, +): { [index: string]: T } => { + const entries = Object.entries(suggestions); + return fromEntries( + entries.map(([i, s]) => { + return [`${parseInt(i, 10) + suggestionIndex + 1}`, s]; + }), + ); +}; + +function mergeLegacyResults( + oldAnalysisResults: AnalysisResultLegacy, + newAnalysisResults: AnalysisResultLegacy, + limitToFiles: string[], + removedFiles: string[] = [], + baseDir: string, + ): AnalysisResultLegacy { + + // expand relative file names to absolute ones only for legacy results + newAnalysisResults.files = normalizeResultFiles(newAnalysisResults.files, baseDir); + + // Determine max suggestion index in our data + const suggestionIndex = Math.max(...Object.keys(oldAnalysisResults.suggestions).map(i => parseInt(i, 10))) || -1; + + // Addup all new suggestions' indexes + const newSuggestions = moveSuggestionIndexes<Suggestion>(suggestionIndex, newAnalysisResults.suggestions); + const suggestions = { ...oldAnalysisResults.suggestions, ...newSuggestions }; + + const newFiles = fromEntries( + Object.entries(newAnalysisResults.files).map(([fn, s]) => { + return [fn, moveSuggestionIndexes(suggestionIndex, s)]; + }), + ); + + // expand relative file names to absolute ones only for legacy results + const changedFiles = [...limitToFiles, ...removedFiles].map(path => resolveBundleFilePath(baseDir, path)); + + const files = { + ...omit(oldAnalysisResults.files, 
changedFiles), + ...newFiles, + }; + + return { + ...newAnalysisResults, + files, + suggestions, + }; +} + interface ExtendAnalysisOptions extends FileAnalysis { files: string[]; } @@ -234,10 +306,7 @@ export async function extendAnalysis(options: ExtendAnalysisOptions): Promise<Fi limitToFiles, }); - // TODO: Transform relative paths into absolute - // analysisData.analysisResults.files = normalizeResultFiles(analysisData.analysisResults.files, bundle.baseDir); - - analysisResults = mergeBundleResults(options.analysisResults, analysisResults, limitToFiles, removedFiles); + analysisResults = mergeBundleResults(options.analysisResults, analysisResults, limitToFiles, removedFiles, options.fileBundle.baseDir); return { ...options, fileBundle, analysisResults }; } diff --git a/src/bundles.ts b/src/bundles.ts index c5233f20..106d0d16 100644 --- a/src/bundles.ts +++ b/src/bundles.ts @@ -28,7 +28,7 @@ import { } from './http'; import { MAX_PAYLOAD, MAX_UPLOAD_ATTEMPTS, UPLOAD_CONCURRENCY } from './constants'; -import emitter from './emitter'; +import { emitter } from './emitter'; type BundleErrorCodes = CreateBundleErrorCodes | CheckBundleErrorCodes | ExtendBundleErrorCodes; diff --git a/src/constants.ts b/src/constants.ts index 81c0965a..2288ded9 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -12,6 +12,7 @@ export const EXCLUDED_NAMES = [GIT_FILENAME, GITIGNORE_FILENAME, DCIGNORE_FILENA export const CACHE_KEY = '.dccache'; export const MAX_UPLOAD_ATTEMPTS = 5; export const UPLOAD_CONCURRENCY = 5; +export const POLLING_INTERVAL = 500; export const MAX_RETRY_ATTEMPTS = 5; // Request retries on network errors export const REQUEST_RETRY_DELAY = 30 * 1000; // 30 seconds delay between retries diff --git a/src/emitter.ts b/src/emitter.ts index 54482b57..a56bf1bc 100644 --- a/src/emitter.ts +++ b/src/emitter.ts @@ -46,5 +46,4 @@ export class EmitterDC extends EventEmitter { } } -const emitter = new EmitterDC(); -export default emitter; +export const emitter = new EmitterDC(); diff --git a/src/http.ts b/src/http.ts index 8fb02c74..35ba6641 100644 --- a/src/http.ts +++ b/src/http.ts @@ -4,7 +4,6 @@ import pick from 'lodash.pick'; import { ErrorCodes, GenericErrorTypes, DEFAULT_ERROR_MESSAGES } from './constants'; import { BundleFiles, SupportedFiles } from './interfaces/files.interface'; -// import { AnalysisSeverity } from './interfaces/analysis-options.interface'; import { AnalysisResult } from './interfaces/analysis-result.interface'; import { makeRequest, Payload } from './needle'; @@ -386,6 +385,7 @@ export interface AnalysisOptions { readonly severity?: number; readonly limitToFiles?: string[]; readonly prioritized?: boolean; + readonly legacy?: boolean; } export interface GetAnalysisOptions extends ConnectionOptions, AnalysisOptions { @@ -408,8 +408,7 @@ export async function getAnalysis( hash: options.bundleHash, limitToFiles: options.limitToFiles || [], }, - ...pick(options, ['severity', 'prioritized']), - // severity: options.severity || AnalysisSeverity.info, + ...pick(options, ['severity', 'prioritized', 'legacy']), }, }; diff --git a/src/index.ts b/src/index.ts index ca948deb..2a6b2415 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,25 +1,33 @@ -import { analyzeFolders } from './analysis'; +import { analyzeFolders, extendAnalysis, FileAnalysis } from './analysis'; import { createBundleFromFolders } from './bundles'; -import emitter from './emitter'; -import { startSession, checkSession, getAnalysis } from './http'; +import { emitter } from './emitter'; +import { startSession, 
checkSession, getAnalysis, getIpFamily, IpFamily } from './http'; import * as constants from './constants'; import { getGlobPatterns } from './files'; import { SupportedFiles } from './interfaces/files.interface'; import { AnalysisSeverity } from './interfaces/analysis-options.interface'; -import { AnalysisResult } from './interfaces/analysis-result.interface'; +import { AnalysisResult, AnalysisResultLegacy, FilePath, FileSuggestion, Suggestion, Marker } from './interfaces/analysis-result.interface'; export { getGlobPatterns, analyzeFolders, createBundleFromFolders, - // extendAnalysis, - getAnalysis, - startSession, - checkSession, + extendAnalysis, emitter, constants, AnalysisSeverity, AnalysisResult, + AnalysisResultLegacy, SupportedFiles, + FileAnalysis, + FilePath, + FileSuggestion, + Suggestion, + Marker, + getAnalysis, + startSession, + checkSession, + getIpFamily, + IpFamily, }; diff --git a/src/interfaces/analysis-result.interface.ts b/src/interfaces/analysis-result.interface.ts index 22e67c66..8ba64a51 100644 --- a/src/interfaces/analysis-result.interface.ts +++ b/src/interfaces/analysis-result.interface.ts @@ -1,4 +1,5 @@ import { Log } from 'sarif'; +import { AnalysisSeverity } from './analysis-options.interface'; interface Coverage { files: number; @@ -6,8 +7,7 @@ interface Coverage { lang: string; } -export interface AnalysisResult { - sarif: Log; +interface AnalysisResultBase { timing: { fetchingCode: number; analysis: number; @@ -16,3 +16,74 @@ export interface AnalysisResult { coverage: Coverage[]; status: 'COMPLETE'; } + +export interface AnalysisResultSarif extends AnalysisResultBase { + type: 'sarif'; + sarif: Log; +} + +export interface Position { + cols: Point; + rows: Point; +} + +export interface MarkerPosition extends Position { + file: string; +} + +export type Point = [number, number]; + +export interface Marker { + msg: Point; + pos: MarkerPosition[]; +} + +export interface FileSuggestion extends Position { + markers?: Marker[]; +} + +export interface FilePath { + [suggestionIndex: string]: FileSuggestion[]; +} + +export interface AnalysisFiles { + [filePath: string]: FilePath; +} + +interface CommitChangeLine { + line: string; + lineNumber: number; + lineChange: 'removed' | 'added' | 'none'; +} + +interface ExampleCommitFix { + commitURL: string; + lines: CommitChangeLine[]; +} + +export interface Suggestion { + id: string; + message: string; + severity: AnalysisSeverity; + leadURL?: string; + rule: string; + tags: string[]; + categories: string[]; + repoDatasetSize: number; + exampleCommitDescriptions: string[]; + exampleCommitFixes: ExampleCommitFix[]; + cwe: string[]; + title: string; + text: string; +} + +export interface Suggestions { + [suggestionIndex: string]: Suggestion; +} +export interface AnalysisResultLegacy extends AnalysisResultBase { + type: 'legacy'; + suggestions: Suggestions; + files: AnalysisFiles; +} + +export type AnalysisResult = AnalysisResultSarif | AnalysisResultLegacy; diff --git a/src/needle.ts b/src/needle.ts index 3c1d5f9f..f3d25d6d 100644 --- a/src/needle.ts +++ b/src/needle.ts @@ -3,7 +3,7 @@ import needle from 'needle'; import * as querystring from 'querystring'; import https from 'https'; import { URL } from 'url'; -import emitter from './emitter'; +import { emitter } from './emitter'; import { ErrorCodes, @@ -78,10 +78,10 @@ export async function makeRequest(payload: Payload): Promise<{ success: boolean; do { try { response = await needle(method, url, data, options); - emitter.apiRequestLog(`<= Response: ${response.statusCode} 
${JSON.stringify(response.body)}`.slice(0, 399)); + emitter.apiRequestLog(`<= Response: ${response.statusCode} ${JSON.stringify(response.body)}`); success = !!(response.statusCode && response.statusCode >= 200 && response.statusCode < 300); if (success) return { success, response }; - + // Try to avoid breaking requests due to temporary network errors if (attempts > 1 && response.statusCode && [ ErrorCodes.serviceUnavailable, diff --git a/tests/analysis.spec.ts b/tests/analysis.spec.ts index 673045b8..889067d2 100644 --- a/tests/analysis.spec.ts +++ b/tests/analysis.spec.ts @@ -1,11 +1,11 @@ import path from 'path'; import jsonschema from 'jsonschema'; -import { analyzeFolders, extendAnalysis } from '../src/analysis'; +import { analyzeFolders, extendAnalysis, FileAnalysis } from '../src/analysis'; import { uploadRemoteBundle } from '../src/bundles'; import { baseURL, sessionToken, source, TEST_TIMEOUT } from './constants/base'; import { sampleProjectPath, bundleFiles, bundleFilesFull, bundleExtender } from './constants/sample'; -import emitter from '../src/emitter'; +import { emitter } from '../src/emitter'; import { AnalysisResponseProgress } from '../src/http'; import { SupportedFiles } from '../src/interfaces/files.interface'; import { AnalysisSeverity } from '../src/interfaces/analysis-options.interface'; @@ -66,6 +66,9 @@ describe('Functional test of analysis', () => { expect(bundle).toBeTruthy(); if (!bundle) return; // TS trick + expect(bundle.analysisResults.type === 'sarif').toBeTruthy(); + if (bundle.analysisResults.type !== 'sarif') return; + expect(bundle.analysisResults.sarif.runs[0].tool.driver.rules?.length).toEqual(7); expect(bundle.analysisResults.sarif.runs[0].results?.length).toEqual(12); const sampleRes = bundle.analysisResults.sarif.runs[0].results!.find( @@ -152,6 +155,28 @@ describe('Functional test of analysis', () => { TEST_TIMEOUT, ); + it('analyze folder legacy json results', async () => { + const bundle = await analyzeFolders({ + connection: { baseURL, sessionToken, source }, + analysisOptions: { severity: AnalysisSeverity.info, prioritized: true, legacy: true }, + fileOptions: { + paths: [sampleProjectPath], + symlinksEnabled: false, + maxPayload: 1000, + defaultFileIgnores: undefined, + }, + }); + + expect(bundle).toBeTruthy(); + if (!bundle) return; // TS trick + + expect(bundle.analysisResults.type === 'legacy').toBeTruthy(); + if (bundle.analysisResults.type !== 'legacy') return; + + expect(Object.keys(bundle.analysisResults.files)).toHaveLength(5); + expect(Object.keys(bundle.analysisResults.suggestions)).toHaveLength(8); + }); + it('analyze folder - with sarif returned', async () => { const bundle = await analyzeFolders({ connection: { baseURL, sessionToken, source }, @@ -167,6 +192,9 @@ describe('Functional test of analysis', () => { expect(bundle).toBeTruthy(); if (!bundle) return; // TS trick + expect(bundle.analysisResults.type === 'sarif').toBeTruthy(); + if (bundle.analysisResults.type !== 'sarif') return; + const validationResult = jsonschema.validate(bundle.analysisResults.sarif, sarifSchema); expect(validationResult.errors.length).toEqual(0); }); @@ -189,7 +217,7 @@ describe('Functional test of analysis', () => { it( 'extend folder analysis', async () => { - const bundle = await analyzeFolders({ + const fileAnalysis = await analyzeFolders({ connection: { baseURL, sessionToken, source }, analysisOptions: { severity: 1, @@ -201,8 +229,14 @@ describe('Functional test of analysis', () => { }, }); - expect(bundle).toBeTruthy(); - if (!bundle) return; 
// TS trick + expect(fileAnalysis).toBeTruthy(); + if (!fileAnalysis) return; // TS trick + + expect(fileAnalysis.analysisResults.type === 'sarif').toBeTruthy(); + if (fileAnalysis.analysisResults.type !== 'sarif') return; + + expect(fileAnalysis.analysisResults.sarif.runs[0].tool.driver.rules).toHaveLength(7); + expect(fileAnalysis.analysisResults.sarif.runs[0].results).toHaveLength(12); const extender = await bundleExtender(); type Awaited<T> = T extends PromiseLike<infer U> ? Awaited<U> : T; @@ -210,7 +244,7 @@ describe('Functional test of analysis', () => { try { await extender.exec(); extendedBundle = await extendAnalysis({ - ...bundle, + ...fileAnalysis, files: extender.files.all, }); } catch (err) { @@ -222,10 +256,15 @@ describe('Functional test of analysis', () => { expect(extendedBundle).toBeTruthy(); if (!extendedBundle) return; // TS trick - expect(extendedBundle.analysisResults.sarif.runs[0].tool.driver.rules?.length).toEqual(5); - expect(extendedBundle.analysisResults.sarif.runs[0].results?.length).toEqual(10); + expect(extendedBundle.analysisResults.type === 'sarif').toBeTruthy(); + if (extendedBundle.analysisResults.type !== 'sarif') return; + + const sarifResults = extendedBundle.analysisResults.sarif; + + expect(sarifResults.runs[0].tool.driver.rules).toHaveLength(5); + expect(sarifResults.runs[0].results).toHaveLength(10); const getRes = (path: string) => - extendedBundle!.analysisResults.sarif.runs[0].results!.find( + sarifResults.runs[0].results!.find( res => res.locations?.[0].physicalLocation?.artifactLocation?.uri === path, ); const sampleRes = getRes(extender.files.added); @@ -238,11 +277,11 @@ describe('Functional test of analysis', () => { expect(sampleRes.ruleIndex).toBeDefined(); if (!sampleRes.ruleIndex) return; // TS trick expect(sampleRes!.ruleId).toEqual( - extendedBundle.analysisResults.sarif.runs[0].tool.driver.rules![sampleRes!.ruleIndex!].id, + sarifResults.runs[0].tool.driver.rules![sampleRes!.ruleIndex!].id, ); - expect(bundle.analysisResults.timing.analysis).toBeGreaterThanOrEqual( - bundle.analysisResults.timing.fetchingCode, + expect(extendedBundle.analysisResults.timing.analysis).toBeGreaterThanOrEqual( + extendedBundle.analysisResults.timing.fetchingCode, ); expect(extendedBundle.analysisResults.timing.queue).toBeGreaterThanOrEqual(0); expect(new Set(extendedBundle.analysisResults.coverage)).toEqual( diff --git a/tests/api.spec.ts b/tests/api.spec.ts index 8f4d8304..a2d9c28a 100644 --- a/tests/api.spec.ts +++ b/tests/api.spec.ts @@ -274,7 +274,7 @@ describe('Requests to public API', () => { if (response.type === 'error') return; expect(response.value.status !== AnalysisStatus.failed).toBeTruthy(); - if (response.value.status === AnalysisStatus.complete) { + if (response.value.status === AnalysisStatus.complete && response.value.type === 'sarif' ) { expect(response.value.sarif.runs[0].results).toHaveLength(12); expect(new Set(response.value.coverage)).toEqual( @@ -318,6 +318,10 @@ describe('Requests to public API', () => { if (response.type === 'error') return; expect(response.value.status !== AnalysisStatus.failed).toBeTruthy(); } while (response.value.status !== AnalysisStatus.complete); + + expect(response.value.type === 'sarif').toBeTruthy(); + if (response.value.type !== 'sarif') return; + expect(response.value.sarif.runs[0].results).toHaveLength(8); // Get analysis results with severity 3 @@ -333,6 +337,10 @@ describe('Requests to public API', () => { if (response.type === 'error') return; expect(response.value.status !== 
AnalysisStatus.failed).toBeTruthy(); } while (response.value.status !== AnalysisStatus.complete); + + expect(response.value.type === 'sarif').toBeTruthy(); + if (response.value.type !== 'sarif') return; + expect(response.value.sarif.runs[0].results).toHaveLength(4); }, TEST_TIMEOUT, diff --git a/tests/constants/sample.ts b/tests/constants/sample.ts index 4045f395..8f1a97f2 100644 --- a/tests/constants/sample.ts +++ b/tests/constants/sample.ts @@ -64,6 +64,7 @@ export const bundleExtender: () => Promise<{ ); const original = changedFiles.map(path => fBundle.find(f => f.filePath === path)?.content); if (original.some(c => !c)) throw new Error('Content not found. Impossible to restore'); + return { files: { removed: changedFilesNames[0],
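
Reviewer notes (not part of the patch) follow. First, a minimal consumer-side sketch of the surface this change exports: `analyzeFolders` with the new `legacy` analysis option and the `AnalysisResult` discriminated union that callers such as the VS Code extension now have to narrow on. The connection values are placeholders, and the option values are modelled on the `analyze folder legacy json results` test above rather than recommended settings.

```ts
import { analyzeFolders, AnalysisSeverity } from '@snyk/code-client';

async function run(): Promise<void> {
  const fileAnalysis = await analyzeFolders({
    // Placeholder connection details; substitute a real endpoint, token and source string.
    connection: { baseURL: 'https://<code-service-host>', sessionToken: '<session token>', source: '<integration>' },
    // `legacy: true` is the flag added in this patch: it requests the pre-SARIF
    // result format ({ files, suggestions }) instead of a SARIF log.
    analysisOptions: { severity: AnalysisSeverity.info, legacy: true },
    // File options copied from the test fixture; tune maxPayload for real projects.
    fileOptions: {
      paths: [process.cwd()],
      symlinksEnabled: false,
      maxPayload: 1000,
      defaultFileIgnores: undefined,
    },
  });

  // The tests guard against a falsy result before using it, so do the same here.
  if (!fileAnalysis) return;
  const { analysisResults } = fileAnalysis;

  // AnalysisResult is now AnalysisResultSarif | AnalysisResultLegacy;
  // narrow on the `type` discriminator before reading format-specific fields.
  if (analysisResults.type === 'legacy') {
    console.log('files with findings:', Object.keys(analysisResults.files).length);
    console.log('suggestions:', Object.keys(analysisResults.suggestions).length);
  } else {
    console.log('SARIF results:', analysisResults.sarif.runs[0].results?.length ?? 0);
  }
}

run().catch(err => console.error(err));
```

The returned `FileAnalysis` object can be passed, together with a `files` list, to the newly exported `extendAnalysis`, and `mergeBundleResults` uses the same `type` field to route the merge through `mergeSarifResults` or `mergeLegacyResults`.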
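
Second, the `mergeLegacyResults` hunk re-bases incoming suggestion keys with `moveSuggestionIndexes` before merging, which is easy to misread. Below is a standalone illustration of that re-basing with made-up sample values, assuming suggestion keys are stringified non-negative integers as in the legacy format.

```ts
// Old results already use suggestion keys "0".."3", so suggestionIndex = 3 and
// the new bundle's keys "0" and "1" become "4" and "5" before merging.
const oldSuggestions = { '0': 'A', '1': 'B', '2': 'C', '3': 'D' };
const newSuggestions = { '0': 'E', '1': 'F' };

// Same expression as the patch: highest existing key, i.e. 3 here.
const suggestionIndex = Math.max(...Object.keys(oldSuggestions).map(i => parseInt(i, 10))) || -1;

// Shift each incoming key past the highest existing one, as moveSuggestionIndexes does.
const shifted = Object.fromEntries(
  Object.entries(newSuggestions).map(([i, s]) => [`${parseInt(i, 10) + suggestionIndex + 1}`, s]),
); // { '4': 'E', '5': 'F' }

const merged = { ...oldSuggestions, ...shifted };
console.log(merged); // keys '0'..'5'; the same shift is applied to the per-file indexes in `files`.
```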