Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

fix: Fix duplication of logging messages #59

Merged
merged 5 commits into from
Feb 29, 2024
Merged
Show file tree
Hide file tree
Changes from 4 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
14 changes: 7 additions & 7 deletions .pre-commit-config.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ default_install_hook_types: [commit-msg, pre-commit]
default_stages: [commit, merge-commit]
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
rev: v4.5.0
hooks:
- id: check-added-large-files
- id: check-ast
Expand All @@ -26,11 +26,11 @@ repos:
- id: fix-byte-order-marker
- id: trailing-whitespace
- repo: https://github.com/psf/black-pre-commit-mirror
rev: 23.9.1
rev: 24.2.0
hooks:
- id: black
- repo: https://github.com/PyCQA/isort
rev: 5.12.0
rev: 5.13.2
hooks:
- id: isort
- repo: https://github.com/PyCQA/docformatter
Expand All @@ -47,14 +47,14 @@ repos:
additional_dependencies:
- pydocstyle[toml]
- repo: https://github.com/pre-commit/mirrors-mypy
rev: v1.5.1
rev: v1.8.0
hooks:
- id: mypy
additional_dependencies:
- types-requests
- types-PyYAML
- repo: https://github.com/Lucas-C/pre-commit-hooks
rev: v1.5.1
rev: v1.5.5
hooks:
- id: insert-license
name: Insert license headers (shell-style comments)
Expand Down Expand Up @@ -97,10 +97,10 @@ repos:
- --comment-style
- "..| |"
- repo: https://github.com/fsfe/reuse-tool
rev: v2.1.0
rev: v3.0.1
hooks:
- id: reuse
- repo: https://github.com/qoomon/git-conventional-commits
rev: v2.6.5
rev: v2.6.7
hooks:
- id: conventional-commits
1 change: 1 addition & 0 deletions capella2polarion/converters/data_session.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,6 +20,7 @@ class ConverterData:
capella_element: diagram.Diagram | common.GenericElement
work_item: dm.CapellaWorkItem | None = None
description_references: list[str] = dataclasses.field(default_factory=list)
errors: set[str] = dataclasses.field(default_factory=set)


ConverterSession = dict[str, ConverterData]
93 changes: 55 additions & 38 deletions capella2polarion/converters/element_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -77,28 +77,6 @@ def _format(texts: list[str]) -> dict[str, str]:
return requirement_types


def _get_requirement_types_text(
    obj: common.GenericElement,
) -> dict[str, dict[str, str]]:
    """Collect the texts of requirements attached to ``obj``.

    Requirements are grouped by the long name of their type.  Broken
    relations and requirements lacking a type or text are logged and
    skipped.  The grouped texts are passed through ``_format_texts``
    before being returned.
    """
    grouped: dict[str, list[str]] = collections.defaultdict(list)
    for requirement in getattr(obj, "requirements", []):
        # A ``None`` entry means the relation's target is broken.
        if requirement is None:
            logger.error(
                "RequirementsRelation with broken target found %r", obj.name
            )
            continue

        if requirement.type and requirement.text:
            grouped[requirement.type.long_name].append(requirement.text)
        else:
            # Pick the most descriptive identifier available for the log.
            identifier = (
                requirement.long_name
                or requirement.name
                or requirement.summary
                or requirement.uuid
            )
            logger.warning(
                "Requirement without text or type found %r", identifier
            )
    return _format_texts(grouped)


def _condition(
html: bool, value: str
) -> data_models.CapellaWorkItem.Condition:
Expand Down Expand Up @@ -163,15 +141,22 @@ def serialize(self, uuid: str) -> data_models.CapellaWorkItem | None:
] = getattr(self, f"_{converter}")
serializer(converter_data, **params)
except Exception as error:
logger.error(
"Serializing model element %r failed. %s",
uuid,
error.args[0],
)
converter_data.errors.add(error.args[0])
converter_data.work_item = None
return None # Force to not overwrite on failure
assert converter_data.work_item is not None

if converter_data.errors:
log_args = (
converter_data.capella_element._short_repr_(),
"\n\t".join(converter_data.errors),
)
if converter_data.work_item is None:
logger.error("Serialization of %r failed:\n\t %s", *log_args)
else:
logger.warning(
"Serialization of %r successful, but with warnings:"
"\n\t %s",
*log_args,
)
return converter_data.work_item

# General helper functions
Expand Down Expand Up @@ -265,7 +250,9 @@ def _sanitize_linked_text(
obj, "specification", {"capella:linkedText": markupsafe.Markup("")}
)["capella:linkedText"]
linked_text = RE_DESCR_DELETED_PATTERN.sub(
lambda match: strike_through(self._replace_markup(match, [])),
lambda match: strike_through(
self._replace_markup(obj.uuid, match, [])
),
linked_text,
)
linked_text = linked_text.replace("\n", "<br>")
Expand All @@ -278,7 +265,9 @@ def _sanitize_text(
]:
referenced_uuids: list[str] = []
replaced_markup = RE_DESCR_LINK_PATTERN.sub(
lambda match: self._replace_markup(match, referenced_uuids, 2),
lambda match: self._replace_markup(
obj.uuid, match, referenced_uuids, 2
),
text,
)

Expand Down Expand Up @@ -315,10 +304,8 @@ def repair_images(node: etree._Element) -> None:
node.attrib["src"] = f"workitemimg:{file_name}"

except FileNotFoundError:
logger.error(
"Inline image can't be found from %r for %r",
file_path,
obj._short_repr_(),
self.converter_session[obj.uuid].errors.add(
f"Inline image can't be found from {file_path!r}."
)

repaired_markup = chelpers.process_html_fragments(
Expand All @@ -328,6 +315,7 @@ def repair_images(node: etree._Element) -> None:

def _replace_markup(
self,
origin_uuid: str,
match: re.Match,
referenced_uuids: list[str],
default_group: int = 1,
Expand All @@ -341,14 +329,43 @@ def _replace_markup(
try:
self.model.by_uuid(uuid)
except KeyError:
logger.error("Found link to non-existing model element: %r", uuid)
self.converter_session[origin_uuid].errors.add(
"Non-existing model element referenced in description"
)
return strike_through(match.group(default_group))
if pid := self.capella_polarion_mapping.get_work_item_id(uuid):
referenced_uuids.append(uuid)
return POLARION_WORK_ITEM_URL.format(pid=pid)
logger.warning("Found reference to non-existing work item: %r", uuid)

self.converter_session[origin_uuid].errors.add(
"Non-existing work item referenced in description"
)
return match.group(default_group)

def _get_requirement_types_text(
    self,
    obj: common.GenericElement,
) -> dict[str, dict[str, str]]:
    """Collect the texts of requirements attached to ``obj``.

    Requirements are grouped by the long name of their type.  Broken
    relations and requirements lacking a type or text are recorded as
    errors on the converter session entry for ``obj`` and skipped.  The
    grouped texts are passed through ``_format_texts`` before being
    returned.
    """
    grouped: dict[str, list[str]] = collections.defaultdict(list)
    for requirement in getattr(obj, "requirements", []):
        # A ``None`` entry means the relation's target is broken.
        if requirement is None:
            self.converter_session[obj.uuid].errors.add(
                "Found RequirementsRelation with broken target"
            )
            continue

        if requirement.type and requirement.text:
            grouped[requirement.type.long_name].append(requirement.text)
        else:
            # Pick the most descriptive identifier available for the
            # error message.
            identifier = (
                requirement.long_name
                or requirement.name
                or requirement.summary
                or requirement.uuid
            )
            self.converter_session[obj.uuid].errors.add(
                f"Found Requirement without text or type on {identifier!r}"
            )
    return _format_texts(grouped)

# Serializer implementation starts below

def __generic_work_item(
Expand All @@ -360,7 +377,7 @@ def __generic_work_item(
obj, raw_description or markupsafe.Markup("")
)
converter_data.description_references = uuids
requirement_types = _get_requirement_types_text(obj)
requirement_types = self._get_requirement_types_text(obj)
converter_data.work_item = data_models.CapellaWorkItem(
id=work_item_id,
type=converter_data.type_config.p_type,
Expand Down
6 changes: 3 additions & 3 deletions capella2polarion/converters/model_converter.py
Original file line number Diff line number Diff line change
Expand Up @@ -54,9 +54,9 @@ def read_model(
if type_config := config.get_type_config(
layer, c_type, **attributes
):
self.converter_session[
obj.uuid
] = data_session.ConverterData(layer, type_config, obj)
self.converter_session[obj.uuid] = (
data_session.ConverterData(layer, type_config, obj)
)
else:
missing_types.add((layer, c_type, attributes))

Expand Down
Loading
Loading