Commit 580df7e

refactor: add linter and fix reported errors (#87)

derberg authored Jun 16, 2020
1 parent eff8723 commit 580df7e

Showing 33 changed files with 2,905 additions and 937 deletions.
3 changes: 3 additions & 0 deletions .eslintignore
@@ -0,0 +1,3 @@
node_modules
dist
lib/browser.js
105 changes: 105 additions & 0 deletions .eslintrc
@@ -0,0 +1,105 @@
env:
node: true
es6: true
mocha: true

plugins:
- sonarjs
- mocha

extends:
- plugin:sonarjs/recommended
- plugin:mocha/recommended

parserOptions:
ecmaVersion: 2018

rules:
# Ignore Rules
strict: 0
no-underscore-dangle: 0
no-mixed-requires: 0
no-process-exit: 0
no-warning-comments: 0
curly: 0
no-multi-spaces: 0
no-alert: 0
consistent-return: 0
consistent-this: [0, self]
func-style: 0
max-nested-callbacks: 0
camelcase: 0

# Warnings
no-debugger: 1
no-empty: 1
no-invalid-regexp: 1
no-unused-expressions: 1
no-native-reassign: 1
no-fallthrough: 1
sonarjs/cognitive-complexity: 1

# Errors
eqeqeq: 2
no-undef: 2
no-dupe-keys: 2
no-empty-character-class: 2
no-self-compare: 2
valid-typeof: 2
no-unused-vars: [2, { "args": "none" }]
handle-callback-err: 2
no-shadow-restricted-names: 2
no-new-require: 2
no-mixed-spaces-and-tabs: 2
block-scoped-var: 2
no-else-return: 2
no-throw-literal: 2
no-void: 2
radix: 2
wrap-iife: [2, outside]
no-shadow: 0
no-use-before-define: [2, nofunc]
no-path-concat: 2
valid-jsdoc: [0, {requireReturn: false, requireParamDescription: false, requireReturnDescription: false}]

# stylistic errors
no-spaced-func: 2
semi-spacing: 2
quotes: [2, 'single']
key-spacing: [2, { beforeColon: false, afterColon: true }]
indent: [2, 2]
no-lonely-if: 2
no-floating-decimal: 2
brace-style: [2, 1tbs, { allowSingleLine: true }]
comma-style: [2, last]
no-multiple-empty-lines: [2, {max: 1}]
no-nested-ternary: 2
operator-assignment: [2, always]
padded-blocks: [2, never]
quote-props: [2, as-needed]
keyword-spacing: [2, {'before': true, 'after': true, 'overrides': {}}]
space-before-blocks: [2, always]
array-bracket-spacing: [2, never]
computed-property-spacing: [2, never]
space-in-parens: [2, never]
space-unary-ops: [2, {words: true, nonwords: false}]
wrap-regex: 2
linebreak-style: [2, unix]
semi: [2, always]
arrow-spacing: [2, {before: true, after: true}]
no-class-assign: 2
no-const-assign: 2
no-dupe-class-members: 2
no-this-before-super: 2
no-var: 2
object-shorthand: [2, always]
prefer-arrow-callback: 2
prefer-const: 2
prefer-spread: 2
prefer-template: 2

overrides:
- files: "test/**"
rules:
prefer-arrow-callback: 0
sonarjs/no-duplicate-string: 0
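
For reference, a short snippet of code that satisfies the stricter stylistic rules above (single quotes, two-space indentation, mandatory semicolons, prefer-const, prefer-template, object-shorthand). It is purely illustrative and not part of the repository:

// Illustrative only - not code from this commit.
const greet = (name) => {
  const message = `Hello, ${name}!`; // prefer-template, quotes: single
  return { message };                // object-shorthand
};

module.exports = { greet };
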
2 changes: 2 additions & 0 deletions .github/workflows/pull-request-testing.yml
@@ -17,5 +17,7 @@ jobs:
node-version: 13
- name: Install dependencies
run: npm ci
- name: Run linter
run: npm run lint
- name: Run tests
run: npm test
1 change: 1 addition & 0 deletions .gitignore
@@ -1,2 +1,3 @@
node_modules
.vscode
.nyc_output
22 changes: 11 additions & 11 deletions lib/errors/parser-error.js
@@ -1,5 +1,16 @@
const ERROR_URL_PREFIX = 'https://github.com/asyncapi/parser-js/';

const buildError = (from, to) => {
to.type = from.type.startsWith(ERROR_URL_PREFIX) ? from.type : `${ERROR_URL_PREFIX}${from.type}`;
to.title = from.title;
if (from.detail) to.detail = from.detail;
if (from.validationErrors) to.validationErrors = from.validationErrors;
if (from.parsedJSON) to.parsedJSON = from.parsedJSON;
if (from.location) to.location = from.location;
if (from.refs) to.refs = from.refs;
return to;
};

/**
* Represents an error while trying to parse an AsyncAPI document.
*/
@@ -48,15 +59,4 @@ class ParserError extends Error {
}
}

const buildError = (from, to) => {
to.type = from.type.startsWith(ERROR_URL_PREFIX) ? from.type : `${ERROR_URL_PREFIX}${from.type}`;
to.title = from.title;
if (from.detail) to.detail = from.detail;
if (from.validationErrors) to.validationErrors = from.validationErrors;
if (from.parsedJSON) to.parsedJSON = from.parsedJSON;
if (from.location) to.location = from.location;
if (from.refs) to.refs = from.refs;
return to;
};

module.exports = ParserError;
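
A minimal usage sketch of ParserError (illustrative, not part of this commit). It assumes the constructor forwards its options object to buildError(); the constructor itself sits outside the hunk shown above. Hoisting buildError() above the class presumably keeps the new no-use-before-define: [2, nofunc] rule satisfied, since nofunc exempts function declarations but still flags variables, including const arrow functions, used before their definition.

// Illustrative only. The error type value is an example, not a catalogued one.
const ParserError = require('./lib/errors/parser-error');

try {
  throw new ParserError({
    type: 'example-error',                 // buildError() prefixes it with the repo URL
    title: 'Something went wrong while parsing.',
    detail: 'Optional human-readable details.',
  });
} catch (err) {
  console.log(err.type);  // https://github.com/asyncapi/parser-js/example-error
  console.log(err.title); // Something went wrong while parsing.
}
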
37 changes: 17 additions & 20 deletions lib/json-parse.js
Original file line number Diff line number Diff line change
@@ -1,38 +1,35 @@
module.exports = (txt, reviver, context = 20) => {
try {
return JSON.parse(txt, reviver)
return JSON.parse(txt, reviver);
} catch (e) {
if (typeof txt !== 'string') {
const isEmptyArray = Array.isArray(txt) && txt.length === 0
const errorMessage = 'Cannot parse ' +
(isEmptyArray ? 'an empty array' : String(txt))
throw new TypeError(errorMessage)
const isEmptyArray = Array.isArray(txt) && txt.length === 0;
const errorMessage = `Cannot parse ${
isEmptyArray ? 'an empty array' : String(txt)}`;
throw new TypeError(errorMessage);
}
const syntaxErr = e.message.match(/^Unexpected token.*position\s+(\d+)/i)
const errIdx = syntaxErr
? +syntaxErr[1]
: e.message.match(/^Unexpected end of JSON.*/i)
? txt.length - 1
: null
if (errIdx != null) {
const syntaxErr = e.message.match(/^Unexpected token.*position\s+(\d+)/i);
const errIdxBrokenJson = e.message.match(/^Unexpected end of JSON.*/i) ? txt.length - 1 : null;
const errIdx = syntaxErr ? +syntaxErr[1] : errIdxBrokenJson;
if (errIdx !== null) {
const start = errIdx <= context
? 0
: errIdx - context
: errIdx - context;
const end = errIdx + context >= txt.length
? txt.length
: errIdx + context
: errIdx + context;
e.message += ` while parsing near '${
start === 0 ? '' : '...'
}${txt.slice(start, end)}${
}${txt.slice(start, end)}${
end === txt.length ? '' : '...'
}'`
}'`;
} else {
e.message += ` while parsing '${txt.slice(0, context * 2)}'`
e.message += ` while parsing '${txt.slice(0, context * 2)}'`;
}
e.offset = errIdx
e.offset = errIdx;
const lines = txt.substr(0, errIdx).split('\n');
e.startLine = lines.length;
e.startColumn = lines[lines.length - 1].length;
throw e
throw e;
}
}
};
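
A small usage sketch of this wrapper (illustrative, not part of this commit), showing the enriched error it throws:

// Illustrative only. Assumes the module is required straight from the repository.
const jsonParse = require('./lib/json-parse');

try {
  jsonParse('{"asyncapi": "2.0.0",}'); // trailing comma -> SyntaxError
} catch (e) {
  // The original message gains a ` while parsing ...` excerpt of the input,
  // and, when the engine reports a position, the error also carries
  // offset/startLine/startColumn metadata.
  console.log(e.message);
  console.log(e.offset, e.startLine, e.startColumn); // e.g. 21 1 21
}
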
96 changes: 49 additions & 47 deletions lib/models/asyncapi.js
@@ -4,8 +4,9 @@ const Info = require('./info');
const Server = require('./server');
const Channel = require('./channel');
const Components = require('./components');
const Message = require('./message');
const Tag = require('./tag');
const xParserMessageName = 'x-parser-message-name';
const xParserSchemaId = 'x-parser-schema-id';

/**
* Implements functions to deal with the AsyncAPI document.
@@ -139,7 +140,7 @@ class AsyncAPIDocument extends Base {
* @returns {boolean}
*/
hasMessages() {
return !!this.allMessages().size
return !!this.allMessages().size;
}

/**
@@ -198,7 +199,7 @@ function assignNameToComponentMessages(doc) {
if (doc.hasComponents()) {
for (const [key, m] of Object.entries(doc.components().messages())) {
if (m.name() === undefined) {
m.json()['x-parser-message-name'] = key;
m.json()[xParserMessageName] = key;
}
}
}
@@ -213,7 +214,7 @@ function assignUidToParameterSchemas(doc) {
doc.channelNames().forEach(channelName => {
const channel = doc.channel(channelName);
for (const [parameterKey, parameterSchema] of Object.entries(channel.parameters())) {
parameterSchema.json()['x-parser-schema-id'] = parameterKey;
parameterSchema.json()[xParserSchemaId] = parameterKey;
}
});
}
@@ -226,7 +227,7 @@ function assignUidToComponentSchemas(doc) {
function assignUidToComponentSchemas(doc) {
if (doc.hasComponents()) {
for (const [key, s] of Object.entries(doc.components().schemas())) {
s.json()['x-parser-schema-id'] = key;
s.json()[xParserSchemaId] = key;
}
}
}
@@ -242,24 +243,25 @@ function assignNameToAnonymousMessages(doc) {
if (doc.hasChannels()) {
doc.channelNames().forEach(channelName => {
const channel = doc.channel(channelName);
if (channel.hasPublish()) {
channel.publish().messages().forEach(m => {
if (m.name() === undefined) {
m.json()['x-parser-message-name'] = `<anonymous-message-${++anonymousMessageCounter}>`;
}
});
}
if (channel.hasSubscribe()) {
channel.subscribe().messages().forEach(m => {
if (m.name() === undefined) {
m.json()['x-parser-message-name'] = `<anonymous-message-${++anonymousMessageCounter}>`;
}
});
}
if (channel.hasPublish()) addNameToKey(channel.publish().messages(),++anonymousMessageCounter);
if (channel.hasSubscribe()) addNameToKey(channel.subscribe().messages(),++anonymousMessageCounter);
});
}
}

/**
* Assigns an anonymous name to each message in the map that has no name.
*
* @param {Object} messages - Map of messages.
* @param {Number} number - Counter value used in the anonymous message name.
*/
function addNameToKey(messages, number) {
messages.forEach(m => {
if (m.name() === undefined) {
m.json()[xParserMessageName] = `<anonymous-message-${number}>`;
}
});
}

/**
* Recursively go through each schema and execute callback.
*
@@ -274,44 +276,44 @@ function recursiveSchema(schema, callback) {

if (schema.type() !== undefined) {
switch (schema.type()) {
case 'object':
if(schema.additionalProperties() !== undefined && typeof schema.additionalProperties() !== "boolean"){
const additionalSchema = schema.additionalProperties()
recursiveSchema(additionalSchema, callback);
}
if(schema.properties() !== null){
const props = schema.properties();
for (const [, propertySchema] of Object.entries(props)) {
recursiveSchema(propertySchema, callback);
}
}
break;
case 'array':
if(schema.additionalItems() !== undefined){
const additionalArrayItems = schema.additionalItems()
recursiveSchema(additionalArrayItems, callback);
case 'object':
if (schema.additionalProperties() !== undefined && typeof schema.additionalProperties() !== 'boolean') {
const additionalSchema = schema.additionalProperties();
recursiveSchema(additionalSchema, callback);
}
if (schema.properties() !== null) {
const props = schema.properties();
for (const [, propertySchema] of Object.entries(props)) {
recursiveSchema(propertySchema, callback);
}
}
break;
case 'array':
if (schema.additionalItems() !== undefined) {
const additionalArrayItems = schema.additionalItems();
recursiveSchema(additionalArrayItems, callback);
}

if(schema.items() !== null){
if (Array.isArray(schema.items())) {
schema.items().forEach(arraySchema => {
recursiveSchema(arraySchema, callback);
});
} else {
recursiveSchema(schema.items(), callback);
}
if (schema.items() !== null) {
if (Array.isArray(schema.items())) {
schema.items().forEach(arraySchema => {
recursiveSchema(arraySchema, callback);
});
} else {
recursiveSchema(schema.items(), callback);
}
break;
}
break;
}
} else {
//check for allOf, oneOf, anyOf
const checkCombiningSchemas = (combineArray) => {
if (combineArray !== null && combineArray.length > 0) {
combineArray.forEach(combineSchema => {
recursiveSchema(combineSchema, callback);;
recursiveSchema(combineSchema, callback); ;
});
}
}
};
checkCombiningSchemas(schema.allOf());
checkCombiningSchemas(schema.anyOf());
checkCombiningSchemas(schema.oneOf());
@@ -360,7 +362,7 @@ function assignIdToAnonymousSchemas(doc) {
let anonymousSchemaCounter = 0;
const callback = (schema) => {
if (!schema.uid()) {
schema.json()['x-parser-schema-id'] = `<anonymous-schema-${++anonymousSchemaCounter}>`;
schema.json()[xParserSchemaId] = `<anonymous-schema-${++anonymousSchemaCounter}>`;
}
};
schemaDocument(doc, callback);
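
To illustrate the x-parser-* extensions these helpers set, a usage sketch (illustrative, not part of this commit; it assumes the library is consumed as the published @asyncapi/parser package):

// Illustrative only.
const parser = require('@asyncapi/parser');

(async function main() {
  const doc = await parser.parse(`asyncapi: '2.0.0'
info:
  title: Example
  version: '1.0.0'
channels:
  user/signedup:
    subscribe:
      message:
        payload:
          type: string
`);

  // The message has no explicit name, so assignNameToAnonymousMessages()
  // stamped it with the x-parser-message-name extension.
  const message = doc.channel('user/signedup').subscribe().messages()[0];
  console.log(message.json()['x-parser-message-name']); // e.g. <anonymous-message-1>
})();
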