Feature/provide better exception when importing duplicates #557

Merged
application/cmd/cre_main.py (1 addition, 1 deletion)

@@ -643,7 +643,7 @@ def run(args: argparse.Namespace) -> None: # pragma: no cover
     if args.start_worker:
         from application.worker import start_worker

-        start_worker(args.cache_file)
+        start_worker()

     if args.preload_map_analysis_target_url:
         gap_analysis.preload(target_url=args.preload_map_analysis_target_url)
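Note: the cache-file argument is gone because the worker no longer opens its own database connection; it only needs Redis, which it now resolves internally via application.utils.redis (see the application/worker.py change below). A minimal sketch of the new call shape, not code from this PR:

```python
# hedged sketch: the worker entry point is now argument-free
from application.worker import start_worker

if __name__ == "__main__":
    # Redis connection details are resolved inside the worker module,
    # so the caller no longer passes a cache-file path.
    start_worker()
```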
application/defs/cre_defs.py (20 additions, 3 deletions)

@@ -366,11 +366,17 @@ def add_link(self, link: Link) -> "Document":
         if not self.links:
             self.links = []
         if not isinstance(link, Link):
-            raise ValueError("add_link only takes Link() types")
+            raise ValueError(
+                f"add_link only takes Link() types, instead it was provided with '{type(link)}', that's an internal bug, please open a ticket"
+            )
         if link.document.id == self.id:
-            raise ValueError("Cannot link a document to itself")
+            raise ValueError(
+                f"Cannot link a document to itself, {self.id} is the same as the link"
+            )
         if link.document.id in [l.document.id for l in self.links]:
-            raise ValueError("Cannot link the same document twice")
+            raise cre_exceptions.DuplicateLinkException(
+                f"Cannot link the same document twice, document {link.document.id} is already linked to {self.id}"
+            )

         self.links.append(link)
         return self
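With this hunk, linking the same document twice raises a dedicated DuplicateLinkException instead of a bare ValueError, so importers can catch the duplicate case specifically. A minimal sketch of the new behaviour (constructor arguments are illustrative, not taken from this PR):

```python
from application.defs import cre_defs as defs
from application.defs import cre_exceptions

cre = defs.CRE(id="100-100", name="Example CRE")  # illustrative values
standard = defs.Standard(name="ASVS", section="SESSION-MGT", subsection="3.1.1")

cre.add_link(defs.Link(document=standard, ltype=defs.LinkTypes.LinkedTo))
try:
    # a second link to a document with the same id now raises the dedicated exception
    cre.add_link(defs.Link(document=standard, ltype=defs.LinkTypes.LinkedTo))
except cre_exceptions.DuplicateLinkException as dle:
    print(dle)  # "Cannot link the same document twice, document ... is already linked to 100-100"
```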
@@ -453,6 +459,11 @@ def __post_init__(self):
             self.id += f":{self.section}"
         if self.subsection:
             self.id += f":{self.subsection}"
+        if self.hyperlink:
+            self.id += f":{self.hyperlink}"
+        if self.version:
+            self.id += f":{self.version}"

         return super().__post_init__()

     def todict(self) -> Dict[Any, Any]:
@@ -492,6 +503,12 @@ def __post_init__(self):
             self.id += f":{self.section}"
         if self.subsection:
             self.id += f":{self.subsection}"
+        if self.hyperlink:
+            self.id += f":{self.hyperlink}"
+        if self.version:
+            self.id += f":{self.version}"
+        if self.tooltype != ToolTypes.Unknown:
+            self.id += f":{self.tooltype.value}"
         return super().__post_init__()

     def __eq__(self, other: object) -> bool:
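The two __post_init__ hunks above fold hyperlink, version and (for tools) the tool type into the auto-generated document id, so two links that differ only in those fields no longer share an id. A sketch of the resulting id, consistent with the updated expectations in application/tests/defs_test.py further down:

```python
from application.defs import cre_defs as defs

s = defs.Standard(
    name="ASVS",
    section="SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING",
    subsection="3.1.1",
    version="0.0.0",
)
# the version (and the hyperlink, when one is set) is now appended to the id
assert s.id == "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1:0.0.0"
```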
application/defs/cre_exceptions.py (5 additions, 0 deletions)

@@ -10,3 +10,8 @@ class InvalidCREIDException(DocumentFormatException):
     def __init__(self, cre):
         self.message = f"CRE ID '{cre.id}' does not fit pattern '\d\d\d-\d\d\d', cre name is {cre.name}"
         super().__init__(self.message)
+
+
+class DuplicateLinkException(Exception):
+    def __init__(self, message):
+        super().__init__(message)
application/tests/cre_main_test.py (23 additions, 7 deletions)

@@ -261,14 +261,18 @@ def test_register_cre(self) -> None:
         c_higher = self.collection.get_CREs(cre_higher.id)[0]
         c_lower = self.collection.get_CREs(cre_lower.id)[0]
         c_equal = self.collection.get_CREs(cre_equal.id)[0]
-        c = self.collection.get_CREs(cre.id)[0]
+        retrieved_cre = self.collection.get_CREs(cre.id)[0]

         self.assertCountEqual(
             c_higher.links,
-            [defs.Link(document=c.shallow_copy(), ltype=defs.LinkTypes.Contains)],
+            [
+                defs.Link(
+                    document=retrieved_cre.shallow_copy(), ltype=defs.LinkTypes.Contains
+                )
+            ],
         )
         self.assertCountEqual(
-            c.links,
+            retrieved_cre.links,
             [
                 defs.Link(document=standard, ltype=defs.LinkTypes.LinkedTo),
                 defs.Link(document=tool, ltype=defs.LinkTypes.LinkedTo),
@@ -286,15 +290,27 @@

         self.assertCountEqual(
             c_lower.links,
-            [defs.Link(document=c.shallow_copy(), ltype=defs.LinkTypes.PartOf)],
+            [
+                defs.Link(
+                    document=retrieved_cre.shallow_copy(), ltype=defs.LinkTypes.PartOf
+                )
+            ],
         )
         self.assertCountEqual(
             c_higher.links,
-            [defs.Link(document=c.shallow_copy(), ltype=defs.LinkTypes.Contains)],
+            [
+                defs.Link(
+                    document=retrieved_cre.shallow_copy(), ltype=defs.LinkTypes.Contains
+                )
+            ],
         )
         self.assertCountEqual(
             c_equal.links,
-            [defs.Link(document=c.shallow_copy(), ltype=defs.LinkTypes.Related)],
+            [
+                defs.Link(
+                    document=retrieved_cre.shallow_copy(), ltype=defs.LinkTypes.Related
+                )
+            ],
         )

     def test_parse_file(self) -> None:
@@ -367,7 +383,7 @@ def test_parse_file(self) -> None:
                 "objects",
                 "here",
                 {
-                    1: 2,
+                    "1": 2,
                 },
             ],
             scollection=self.collection,
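A note on the fixture change from 1: 2 to "1": 2: once the embedded mapping round-trips through a string-keyed format such as JSON, an integer key comes back as the string "1", which is presumably why the expectation had to follow suit:

```python
import json

# JSON object keys are always strings, so integer keys are coerced on the way out
assert json.loads(json.dumps({1: 2})) == {"1": 2}
```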
application/tests/defs_test.py (13 additions, 13 deletions)

@@ -18,7 +18,7 @@ def test_document_todict(self) -> None:
             version="0.0.0",
         )
         standard_output = {
-            "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1",
+            "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1:0.0.0",
            "doctype": "Standard",
            "name": "ASVS",
            "section": "SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING",
@@ -35,14 +35,14 @@
         )
         cre_output = {
             "description": "CREdesc",
-            "doctype": defs.Credoctypes.CRE,
+            "doctype": defs.Credoctypes.CRE.value,
             "id": "100-100",
             "links": [
                 {
-                    "ltype": defs.LinkTypes.LinkedTo,
+                    "ltype": defs.LinkTypes.LinkedTo.value,
                     "document": {
-                        "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1",
-                        "doctype": defs.Credoctypes.Standard,
+                        "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1:0.0.0",
+                        "doctype": defs.Credoctypes.Standard.value,
                         "name": "ASVS",
                         "section": "SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING",
                         "subsection": "3.1.1",
@@ -75,17 +75,17 @@
             "id": "500-500",
             "links": [
                 {
-                    "ltype": defs.LinkTypes.Related,
+                    "ltype": defs.LinkTypes.Related.value,
                     "document": {
                         "description": "CREdesc",
-                        "doctype": defs.Credoctypes.CRE,
+                        "doctype": defs.Credoctypes.CRE.value,
                         "id": "100-100",
                         "links": [
                             {
-                                "ltype": defs.LinkTypes.LinkedTo,
+                                "ltype": defs.LinkTypes.LinkedTo.value,
                                 "document": {
-                                    "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1",
-                                    "doctype": defs.Credoctypes.Standard,
+                                    "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1:0.0.0",
+                                    "doctype": defs.Credoctypes.Standard.value,
                                     "name": "ASVS",
                                     "section": (
                                         "SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING"
@@ -100,10 +100,10 @@
                                 },
                             },
                             {
-                                "ltype": defs.LinkTypes.LinkedTo,
+                                "ltype": defs.LinkTypes.LinkedTo.value,
                                 "document": {
                                     "id": "Standard:StandardSection:3.1.1",
-                                    "doctype": defs.Credoctypes.Standard,
+                                    "doctype": defs.Credoctypes.Standard.value,
                                     "name": "Standard",
                                     "section": "StandardSection",
                                     "subsection": "3.1.1",
@@ -120,7 +120,7 @@
         )
         nested_output = {
             "id": "ASVS:SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING:3.1.1",
-            "doctype": defs.Credoctypes.Standard,
+            "doctype": defs.Credoctypes.Standard.value,
             "name": "ASVS",
             "section": "SESSION-MGT-TOKEN-DIRECTIVES-DISCRETE-HANDLING",
             "subsection": "3.1.1",
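The defs_test.py updates all follow one pattern: the expected dictionaries now hold the enums' .value strings rather than the enum members themselves, suggesting todict() now emits plain strings, which (unlike enum members) are directly JSON-serialisable. A small illustration with a stand-in enum, not the project's actual definition:

```python
import json
from enum import Enum


class Credoctypes(Enum):  # hypothetical stand-in for defs.Credoctypes
    CRE = "CRE"


json.dumps({"doctype": Credoctypes.CRE.value})  # fine: '{"doctype": "CRE"}'
# json.dumps({"doctype": Credoctypes.CRE})      # would raise TypeError: not JSON serializable
```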
application/web/web_main.py (6 additions, 2 deletions)

@@ -20,7 +20,8 @@
 from application.database import db
 from application.cmd import cre_main
 from application.defs import cre_defs as defs
-from application.defs import osib_defs as odefs
+from application.defs import cre_exceptions
+
 from application.utils import spreadsheet as sheet_utils
 from application.utils import mdutils, redirectors, gap_analysis
 from application.prompt_client import prompt_client as prompt_client

@@ -733,7 +734,10 @@ def import_from_cre_csv() -> Any:
         abort(400, "No file provided")
     contents = file.read()
     csv_read = csv.DictReader(contents.decode("utf-8").splitlines())
-    documents = spreadsheet_parsers.parse_export_format(list(csv_read))
+    try:
+        documents = spreadsheet_parsers.parse_export_format(list(csv_read))
+    except cre_exceptions.DuplicateLinkException as dle:
+        abort(500, f"error during parsing of the incoming CSV, err:{dle}")
     cres = documents.pop(defs.Credoctypes.CRE.value)

     standards = documents
application/worker.py (2 additions, 8 deletions)

@@ -1,9 +1,5 @@
 import os
-import redis
 from rq import Worker, Queue, Connection
-from application.database import db
 import logging
-from application.cmd.cre_main import db_connect
+from application.utils import redis

 logging.basicConfig()

@@ -13,10 +9,8 @@
 listen = ["high", "default", "low"]


-def start_worker(cache: str):
-    conn = redis.connect()
+def start_worker():
     logger.info(f"Worker Starting")
-    database = db_connect(path=cache)
-    with Connection(conn):
+    with Connection(redis.connect()):
         worker = Worker(map(Queue, listen))
         worker.work()