diff --git a/package.json b/package.json
index d1aa96ca..7f3ffc94 100644
--- a/package.json
+++ b/package.json
@@ -45,30 +45,32 @@
     "eslint-plugin-import": "^2.22.0",
     "eslint-plugin-prettier": "^3.1.4",
     "jest": "^26.4.2",
+    "jsonschema": "^1.2.11",
     "prettier": "^2.1.1",
     "ts-jest": "^26.3.0",
     "typescript": "^4.0.2",
-    "yalc": "^1.0.0-pre.44",
-    "jsonschema": "^1.2.11",
-    "write": "^2.0.0"
+    "write": "^2.0.0",
+    "yalc": "^1.0.0-pre.44"
   },
   "dependencies": {
-    "@types/lodash.omit": "^4.5.6",
+    "@deepcode/dcignore": "^1.0.2",
+    "@snyk/fast-glob": "^3.2.6-patch",
+    "@types/flat-cache": "^2.0.0",
     "@types/lodash.chunk": "^4.2.6",
+    "@types/lodash.difference": "^4.5.6",
+    "@types/lodash.omit": "^4.5.6",
     "@types/lodash.union": "^4.6.6",
     "@types/micromatch": "^4.0.1",
-    "@types/flat-cache": "^2.0.0",
     "@types/sarif": "^2.1.3",
-    "@deepcode/dcignore": "^1.0.2",
+    "@types/uuid": "^8.3.0",
     "axios": "^0.21.1",
-    "lodash.omit": "^4.5.0",
+    "ignore": "^5.1.8",
     "lodash.chunk": "^4.2.0",
+    "lodash.difference": "^4.5.0",
+    "lodash.omit": "^4.5.0",
     "lodash.union": "^4.6.0",
-    "ignore": "^5.1.8",
-    "queue": "^6.0.1",
-    "@snyk/fast-glob": "^3.2.6-patch",
     "micromatch": "^4.0.2",
-    "@types/uuid": "^8.3.0",
+    "queue": "^6.0.1",
     "uuid": "^8.3.2"
   }
 }
diff --git a/src/axios.ts b/src/axios.ts
index a802c462..f5a37ae4 100644
--- a/src/axios.ts
+++ b/src/axios.ts
@@ -3,12 +3,21 @@ import http from 'http';
 import https from 'https';
 import emitter from './emitter';
 
+// The Snyk CLI allows passing the --insecure flag, which permits self-signed certificates.
+// It sets the global namespace property ignoreUnknownCA, which we read here to derive
+// the rejectUnauthorized option for the https agent.
+export declare interface Global extends NodeJS.Global {
+  ignoreUnknownCA: boolean;
+}
+declare const global: Global;
+
 const agentOptions = {
   keepAlive: true,
   maxSockets: 100, // Maximum number of sockets to allow per host. Defaults to Infinity.
   maxFreeSockets: 10,
   freeSocketTimeout: 60000, // // Maximum number of sockets to leave open for 60 seconds in a free state. Only relevant if keepAlive is set to true. Defaults to 256.
   socketActiveTTL: 1000 * 60 * 10,
+  rejectUnauthorized: !global.ignoreUnknownCA,
 };
 
 const axios_ = axios.create({
diff --git a/src/constants.ts b/src/constants.ts
index 1a5c158d..b7f98797 100644
--- a/src/constants.ts
+++ b/src/constants.ts
@@ -12,7 +12,7 @@ export const EXCLUDED_NAMES = [GIT_FILENAME, GITIGNORE_FILENAME, DCIGNORE_FILENA
 export const CACHE_KEY = '.dccache';
 export const MAX_UPLOAD_ATTEMPTS = 5;
 
-export const IGNORES_DEFAULT = [`**/${GIT_FILENAME}`];
+export const IGNORES_DEFAULT = [`**/${GIT_FILENAME}/**`];
 
 export const IGNORE_FILES_NAMES = [GITIGNORE_FILENAME, DCIGNORE_FILENAME];
diff --git a/src/files.ts b/src/files.ts
index 15dbd131..6ed82f1c 100644
--- a/src/files.ts
+++ b/src/files.ts
@@ -4,6 +4,7 @@ import fg from '@snyk/fast-glob';
 import micromatch from 'micromatch';
 import crypto from 'crypto';
 import union from 'lodash.union';
+import difference from 'lodash.difference';
 import util from 'util';
 import { Cache } from './cache';
 import { HASH_ALGORITHM, ENCODE_TYPE, MAX_PAYLOAD, IGNORES_DEFAULT, IGNORE_FILES_NAMES, CACHE_KEY } from './constants';
@@ -76,11 +77,14 @@ export function parseFileIgnores(path: string): string[] {
   }
 
   return rules.map(rule => {
-    if (rule.startsWith('/') || rule.startsWith('**')) {
-      return nodePath.posix.join(dirname, rule);
+    let prefix = "";
+    if (rule.startsWith('!')) {
+      rule = rule.substring(1);
+      prefix = "!";
     }
-
-    return nodePath.posix.join(dirname, '**', rule);
+    return rule.startsWith('/') || rule.startsWith('**')
+      ? prefix + nodePath.posix.join(dirname, rule, '**')
+      : prefix + nodePath.posix.join(dirname, '**', rule, '**');
   });
 }
@@ -107,7 +111,6 @@ export async function collectIgnoreRules(
     IGNORE_FILES_NAMES.map(i => `*${i}`),
     {
       ...fgOptions,
-      ignore: fileIgnores,
       cwd: folder,
       followSymbolicLinks: symlinksEnabled,
     },
@@ -132,25 +135,22 @@ export function determineBaseDir(paths: string[]): string {
   return '';
 }
 
-function searchFiles(
+async function* searchFiles(
   patterns: string[],
   cwd: string,
   symlinksEnabled: boolean,
   ignores: string[],
-): NodeJS.ReadableStream {
-  const relIgnores = ignores.map(i => {
-    if (i.startsWith(cwd)) {
-      return i.slice(cwd.length + 1);
-    }
-    return i;
-  });
-
-  return fg.stream(patterns, {
+): AsyncGenerator {
+  const searcher = fg.stream(patterns, {
     ...fgOptions,
     cwd,
-    ignore: relIgnores,
     followSymbolicLinks: symlinksEnabled,
   });
+  for await (const filePath of searcher) {
+    if (filterIgnoredFiles([filePath.toString()], ignores).length) {
+      yield filePath;
+    }
+  }
 }
 
 /**
@@ -224,19 +224,14 @@ export async function prepareExtendingBundle(
   // Filter for supported extensions/files only
   let processingFiles: string[] = filterSupportedFiles(files, supportedFiles);
 
-  // Exclude files to be ignored based on ignore rules. We assume here, that ignore rules have not been changed
-  processingFiles = micromatch(
-    processingFiles,
-    fileIgnores.map(p => `!${p}`),
-    microMatchOptions,
-  );
+  // Exclude files to be ignored based on ignore rules. We assume here, that ignore rules have not been changed.
+  processingFiles = filterIgnoredFiles(processingFiles, fileIgnores);
 
   if (processingFiles.length) {
     // Determine existing files (minus removed)
     const entries = await fg(processingFiles, {
       ...fgOptions,
       cwd: baseDir,
-      ignore: fileIgnores,
       followSymbolicLinks: symlinksEnabled,
       objectMode: true,
       stats: true,
@@ -391,3 +386,11 @@ export function* composeFilePayloads(files: IFileInfo[], bucketSize = MAX_PAYLOA
     yield bucket.files;
   }
 }
+
+export function filterIgnoredFiles(
+  filePaths: string[],
+  ignores: string[],
+) {
+  const ignored = micromatch(filePaths, ignores, {...microMatchOptions, basename: false});
+  return difference(filePaths, ignored);
+}
diff --git a/tests/analysis.spec.ts b/tests/analysis.spec.ts
index 29a3be0b..972e2792 100644
--- a/tests/analysis.spec.ts
+++ b/tests/analysis.spec.ts
@@ -64,7 +64,7 @@ describe('Functional test of analysis', () => {
     expect(bundle).toHaveProperty('sessionToken');
     expect(bundle).toHaveProperty('supportedFiles');
     expect(bundle).toHaveProperty('analysisURL');
-    expect(Object.keys(bundle.analysisResults.files).length).toEqual(3);
+    expect(Object.keys(bundle.analysisResults.files).length).toEqual(4);
     expect(
       bundle.analysisResults.files.hasOwnProperty(`${sampleProjectPath}/GitHubAccessTokenScrambler12.java`),
     ).toBeTruthy();
@@ -75,7 +75,7 @@ describe('Functional test of analysis', () => {
     expect(new Set(bundle.analysisResults.coverage)).toEqual(
       new Set([
         {
-          files: 1,
+          files: 2,
           isSupported: true,
           lang: 'Java',
         },
@@ -89,7 +89,7 @@ describe('Functional test of analysis', () => {
 
     // Check if emitter event happened
     expect(onSupportedFilesLoaded).toHaveBeenCalledTimes(2);
-    expect(onScanFilesProgress).toHaveBeenCalledTimes(6);
+    expect(onScanFilesProgress).toHaveBeenCalledTimes(7);
     expect(onCreateBundleProgress).toHaveBeenCalledTimes(3);
     expect(onAnalyseProgress).toHaveBeenCalled();
     expect(onAPIRequestLog).toHaveBeenCalled();
diff --git a/tests/api.spec.ts b/tests/api.spec.ts
index 47ccf8a0..38250776 100644
--- a/tests/api.spec.ts
+++ b/tests/api.spec.ts
@@ -1,5 +1,5 @@
 import { baseURL, authHost, sessionToken, TEST_TIMEOUT } from './constants/base';
-import { bundleFiles, bundleFilesFull } from './constants/sample';
+import { bundleFiles, bundleFilesFull, bundleFilePaths } from './constants/sample';
 import { fromEntries } from '../src/lib/utils';
 import {
   getFilters,
@@ -16,9 +16,9 @@ import {
   reportEvent,
 } from '../src/http';
 
-const fakeBundleId = '79925ce5d4dbfcb9f7f90f671bfcbdaebf394b3c91b49eb4d2b57f109d2abcc6';
+const fakeBundleId = '769e1811db5e98abdb65df9160228ad51646f535af28b80a8913813aec1bd331';
 let fakeBundleIdFull = '';
-const realBundleId = '39bc8dbc1e4fd197323fe352231ff3afad2cd2ea191b4abeb3613340c8752ea0';
+const realBundleId = 'ffdd7b1cb8e28d3859e6b7179bf4b871d1abd8a7a2cacf0002d5174aca3b6841';
 let realBundleIdFull = '';
 
 const reportTelemetryRequest = {
@@ -42,7 +42,7 @@ describe('Requests to public API', () => {
     if (response.type === 'error') return;
     expect(new Set(response.value.configFiles)).toEqual(new Set(['.dcignore', '.gitignore']));
     expect(new Set(response.value.extensions)).toEqual(
-      new Set(['.es', '.es6', '.htm', '.html', '.js', '.jsx', '.py', '.ts', '.tsx', '.vue', '.java']),
+      new Set(['.es', '.es6', '.htm', '.html', '.js', '.jsx', '.py', '.ts', '.tsx', '.vue', '.java', '.java-dummy']),
     );
   });
 
@@ -112,6 +112,7 @@ describe('Requests to public API', () => {
       `/app.js`,
       `/db.js`,
       `/main.js`,
+      `/not/ignored/this_should_not_be_ignored.java`,
       `/routes/index.js`,
       `/routes/sharks.js`,
     ]);
@@ -135,6 +136,7 @@ describe('Requests to public API', () => {
       `/app.js`,
       `/db.js`,
       `/main.js`,
+      `/not/ignored/this_should_not_be_ignored.java`,
       `/routes/index.js`,
       `/routes/sharks.js`,
     ]);
@@ -199,6 +201,7 @@ describe('Requests to public API', () => {
       `/GitHubAccessTokenScrambler12.java`,
       `/db.js`,
       `/main.js`,
+      `/not/ignored/this_should_not_be_ignored.java`,
       `/routes/index.js`,
       `/routes/sharks.js`,
     ],
@@ -360,7 +363,7 @@ describe('Requests to public API', () => {
     expect(suggestion.severity).toEqual(2);
     expect(suggestion.tags).toEqual(['maintenance', 'express', 'server', 'helmet']);
 
-    expect(Object.keys(response.value.analysisResults.files).length).toEqual(3);
+    expect(Object.keys(response.value.analysisResults.files).length).toEqual(4);
     expect(response.value.analysisResults.timing.analysis).toBeGreaterThanOrEqual(
       response.value.analysisResults.timing.fetchingCode,
     );
@@ -370,7 +373,7 @@ describe('Requests to public API', () => {
     expect(new Set(response.value.analysisResults.coverage)).toEqual(
       new Set([
         {
-          files: 1,
+          files: 2,
           isSupported: true,
           lang: 'Java',
         },
@@ -401,7 +404,9 @@ describe('Requests to public API', () => {
     } while (response.value.status !== AnalysisStatus.done);
 
     expect(Object.keys(response.value.analysisResults.suggestions).length).toEqual(4);
-    expect(Object.keys(response.value.analysisResults.files)).toEqual(['/GitHubAccessTokenScrambler12.java']);
+    expect(new Set(Object.keys(response.value.analysisResults.files))).toEqual(
+      new Set(['/GitHubAccessTokenScrambler12.java', '/not/ignored/this_should_not_be_ignored.java']),
+    );
 
     // Get analysis results without linters but with severity 3
     do {
@@ -419,7 +424,9 @@ describe('Requests to public API', () => {
     } while (response.value.status !== AnalysisStatus.done);
 
     expect(Object.keys(response.value.analysisResults.suggestions).length).toEqual(2);
-    expect(Object.keys(response.value.analysisResults.files)).toEqual(['/GitHubAccessTokenScrambler12.java']);
+    expect(new Set(Object.keys(response.value.analysisResults.files))).toEqual(
+      new Set(['/GitHubAccessTokenScrambler12.java', '/not/ignored/this_should_not_be_ignored.java']),
+    );
     },
     TEST_TIMEOUT,
   );
diff --git a/tests/constants/sample.ts b/tests/constants/sample.ts
index 2a2911b4..40ec2f78 100644
--- a/tests/constants/sample.ts
+++ b/tests/constants/sample.ts
@@ -5,11 +5,18 @@ import { getFileInfo, notEmpty } from '../../src/files';
 
 export const sampleProjectPath = path.resolve(__dirname, '../sample-repo');
 export const supportedFiles = {
-  extensions: ['.js', '.cpp', '.java'],
+  extensions: ['.js', '.jsx', '.cpp', '.java'],
   configFiles: ['.eslintrc.json', '.dcignore'],
 };
 
-export const bundleFileIgnores = ['**/.git', `${sampleProjectPath}/**/models`, `${sampleProjectPath}/**/controllers`];
+export const bundleFileIgnores = [
+  '**/.git/**',
+  `${sampleProjectPath}/**/models/**`,
+  `${sampleProjectPath}/**/controllers/**`,
+  `${sampleProjectPath}/**/ignored/**`,
+  `!${sampleProjectPath}/**/not/ignored/**`,
+  `${sampleProjectPath}/**/*.jsx/**`
+];
 
 export const bundleFilePaths = [
   '/.eslintrc.json',
@@ -20,6 +27,7 @@ export const bundleFilePaths = [
   'main.js',
   'routes/index.js',
   'routes/sharks.js',
+  'not/ignored/this_should_not_be_ignored.java',
 ];
 
 async function getBundleFiles(withContent: boolean) {
diff --git a/tests/files.spec.ts b/tests/files.spec.ts
index 6554c226..3b78d795 100644
--- a/tests/files.spec.ts
+++ b/tests/files.spec.ts
@@ -5,12 +5,13 @@ import * as nodePath from 'path';
 import {
   collectIgnoreRules,
   collectBundleFiles,
+  prepareExtendingBundle,
   composeFilePayloads,
   parseFileIgnores,
   getFileInfo,
 } from '../src/files';
-import { sampleProjectPath, supportedFiles, bundleFiles, bundleFilesFull, bundleFileIgnores } from './constants/sample';
+import { sampleProjectPath, supportedFiles, bundleFiles, bundleFilePaths, bundleFilesFull, bundleFileIgnores } from './constants/sample';
 
 describe('files', () => {
@@ -34,6 +35,21 @@ describe('files', () => {
     expect(testFile.size).toEqual(239);
   });
 
+  it('extend bundle files', async () => {
+    const testNewFiles = [
+      `${sampleProjectPath}/app.js`,
+      `${sampleProjectPath}/not/ignored/this_should_not_be_ignored.java`
+    ];
+    const testRemovedFiles = [
+      `${sampleProjectPath}/removed_from_the_parent_bundle.java`,
+      `${sampleProjectPath}/ignored/this_should_be_ignored.java`
+    ];
+    const parentBundle = [...testNewFiles, ...testRemovedFiles]
+    const { files, removedFiles } = await prepareExtendingBundle(sampleProjectPath, parentBundle, supportedFiles, bundleFileIgnores);
+    expect(files).toEqual((await bundleFiles).filter(obj => testNewFiles.includes(obj.filePath)));
+    expect(removedFiles).toEqual(['/removed_from_the_parent_bundle.java']);
+  });
+
   it('collect bundle files with small max payload', async () => {
     // Limit size and we get fewer files
     const collector = collectBundleFiles(
@@ -53,7 +69,7 @@ describe('files', () => {
   it('collect bundle files with multiple folders', async () => {
     // Limit size and we get fewer files
     const folders = [nodePath.join(sampleProjectPath, 'models'), nodePath.join(sampleProjectPath, 'controllers')];
-    const collector = collectBundleFiles(sampleProjectPath, folders, supportedFiles, bundleFileIgnores);
+    const collector = collectBundleFiles(sampleProjectPath, folders, supportedFiles);
     const smallFiles = [];
     for await (const f of collector) {
       smallFiles.push(f);
@@ -68,7 +84,7 @@ describe('files', () => {
   it('compose file payloads', async () => {
     // Prepare all missing files first
     const payloads = [...composeFilePayloads(await bundleFilesFull, 1024)];
-    expect(payloads.length).toEqual(4); // 4 chunks
+    expect(payloads.length).toEqual(5); // 5 chunks
     expect(payloads[0].length).toEqual(4);
 
     const testPayload = payloads[0][1];
@@ -81,7 +97,7 @@ describe('files', () => {
 
   it('parse dc ignore file', () => {
     const patterns = parseFileIgnores(`${sampleProjectPath}/.dcignore`);
-    expect(patterns.length).toEqual(2);
+    expect(patterns.length).toEqual(5);
   });
 
   it('support of utf-8 encoding', async () => {
diff --git a/tests/git.analysis.spec.ts b/tests/git.analysis.spec.ts
index 8ea73002..220250f5 100644
--- a/tests/git.analysis.spec.ts
+++ b/tests/git.analysis.spec.ts
@@ -95,7 +95,7 @@ describe('Functional test of analysis', () => {
     });
 
     // Test DC JSON format first
-    expect(Object.keys(bundle.analysisResults.suggestions).length).toEqual(119);
+    expect(Object.keys(bundle.analysisResults.suggestions).length).toEqual(134);
 
     const cweSuggestion = Object.values(bundle.analysisResults.suggestions).find(
       s => s.id === 'java%2Fdc_interfile_project%2FPT',
@@ -105,8 +105,8 @@ describe('Functional test of analysis', () => {
     expect(cweSuggestion?.title).toBeTruthy();
     expect(cweSuggestion?.text).toBeTruthy();
 
-    expect(bundle.sarifResults?.runs[0].results?.length).toEqual(400);
-    expect(bundle.sarifResults?.runs[0].tool?.driver.rules?.length).toEqual(119);
+    expect(bundle.sarifResults?.runs[0].results?.length).toEqual(442);
+    expect(bundle.sarifResults?.runs[0].tool?.driver.rules?.length).toEqual(134);
 
     const cweRule = bundle.sarifResults?.runs[0].tool?.driver.rules?.find(r => r.id === 'java/PT');
     expect(cweRule?.properties?.cwe).toContain('CWE-23');
diff --git a/tests/sample-repo/.dcignore b/tests/sample-repo/.dcignore
index c1c861bd..495f4410 100644
--- a/tests/sample-repo/.dcignore
+++ b/tests/sample-repo/.dcignore
@@ -1,2 +1,6 @@
 models
 **/controllers
+
+ignored
+!not/ignored
+*.jsx
\ No newline at end of file
diff --git a/tests/sample-repo/ignored/this_should_be_ignored.java b/tests/sample-repo/ignored/this_should_be_ignored.java
new file mode 100644
index 00000000..13afcc6a
--- /dev/null
+++ b/tests/sample-repo/ignored/this_should_be_ignored.java
@@ -0,0 +1,28 @@
+
+
+import org.apache.commons.codec.binary.Base64;
+
+import javax.crypto.Cipher;
+import javax.crypto.spec.IvParameterSpec;
+import javax.crypto.spec.SecretKeySpec;
+
+public class GitHubAccessTokenScrambler12 {
+    static final String myInitVector = "RandomInitVector";
+    static final String myKey = "GitHubErrorToken";
+
+    static String encrypt(String value) {
+        try {
+            IvParameterSpec iv = new IvParameterSpec(myInitVector.getBytes("UTF-8"));
+            SecretKeySpec keySpec = new SecretKeySpec(myKey.getBytes("UTF-8"), "AES");
+
+            Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5PADDING");
+            cipher.init(Cipher.ENCRYPT_MODE, keySpec, iv);
+
+            byte[] encrypted = cipher.doFinal(value.getBytes());
+            return Base64.encodeBase64String(encrypted);
+        } catch (Exception ex) {
+            ex.printStackTrace();
+        }
+        return null;
+    }
+}
diff --git a/tests/sample-repo/not/ignored/this_should_be_ignored.jsx b/tests/sample-repo/not/ignored/this_should_be_ignored.jsx
new file mode 100644
index 00000000..6e81f7cf
--- /dev/null
+++ b/tests/sample-repo/not/ignored/this_should_be_ignored.jsx
@@ -0,0 +1,9 @@
+const mongoose = require('mongoose');
+const Schema = mongoose.Schema;
+
+const Shark = new Schema({
+  name: { type: String, required: true },
+  character: { type: String, required: true },
+});
+
+module.exports = mongoose.model('Shark', Shark)
diff --git a/tests/sample-repo/not/ignored/this_should_not_be_ignored.java b/tests/sample-repo/not/ignored/this_should_not_be_ignored.java
new file mode 100644
index 00000000..13afcc6a
--- /dev/null
+++ b/tests/sample-repo/not/ignored/this_should_not_be_ignored.java
@@ -0,0 +1,28 @@
+
+
+import org.apache.commons.codec.binary.Base64;
+
+import javax.crypto.Cipher;
+import javax.crypto.spec.IvParameterSpec;
+import javax.crypto.spec.SecretKeySpec;
+
+public class GitHubAccessTokenScrambler12 {
+    static final String myInitVector = "RandomInitVector";
+    static final String myKey = "GitHubErrorToken";
+
+    static String encrypt(String value) {
+        try {
+            IvParameterSpec iv = new IvParameterSpec(myInitVector.getBytes("UTF-8"));
+            SecretKeySpec keySpec = new SecretKeySpec(myKey.getBytes("UTF-8"), "AES");
+
+            Cipher cipher = Cipher.getInstance("AES/CBC/PKCS5PADDING");
+            cipher.init(Cipher.ENCRYPT_MODE, keySpec, iv);
+
+            byte[] encrypted = cipher.doFinal(value.getBytes());
+            return Base64.encodeBase64String(encrypted);
+        } catch (Exception ex) {
+            ex.printStackTrace();
+        }
+        return null;
+    }
+}
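
Editor's note (not part of the patch): a minimal, self-contained TypeScript sketch of how the new filterIgnoredFiles() helper in src/files.ts treats negated ignore rules. The micromatch option set and the /repo sample paths below are assumptions for illustration; the real helper spreads the repo's microMatchOptions instead.

import micromatch from 'micromatch';
import difference from 'lodash.difference';

// Approximation of filterIgnoredFiles(): collect every path matched by the
// ignore globs, then keep only the paths that are NOT in that set.
function filterIgnoredFilesSketch(filePaths: string[], ignores: string[]): string[] {
  // micromatch evaluates the patterns in order: positive globs select paths,
  // while '!'-prefixed globs (the ones parseFileIgnores() now emits for negated
  // .dcignore rules) remove previously selected paths again.
  const ignored = micromatch(filePaths, ignores, { dot: true, basename: false }); // options assumed
  return difference(filePaths, ignored);
}

// Patterns shaped like the ones parseFileIgnores() now produces: note the
// trailing '/**' and the preserved '!' prefix.
const ignores = ['/repo/**/ignored/**', '!/repo/**/not/ignored/**'];

console.log(
  filterIgnoredFilesSketch(
    ['/repo/ignored/a.java', '/repo/not/ignored/b.java', '/repo/app.js'],
    ignores,
  ),
);
// -> ['/repo/not/ignored/b.java', '/repo/app.js']

This mirrors what the updated tests assert: everything under ignored/ is dropped, while not/ignored/this_should_not_be_ignored.java is re-included by the negation rule.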