From 3470a5991516a86429607e29a6db81870873813f Mon Sep 17 00:00:00 2001
From: Michael Harbarth
Date: Wed, 10 Jan 2024 11:32:25 +0100
Subject: [PATCH] feat: additional refactoring and fixed all tests

---
 capella2polarion/__main__.py                  |  43 +-
 capella2polarion/cli.py                       |  90 +---
 .../converters/converter_config.py            |  43 +-
 ...verter_data_session.py => data_session.py} |   6 +-
 .../converters/element_converter.py           | 119 ++----
 capella2polarion/converters/link_converter.py |  22 +-
 capella2polarion/worker.py                    | 258 +++++------
 tests/__init__.py                             |   3 -
 tests/conftest.py                             |   2 +-
 tests/data/model_elements/new_config.yaml     |  26 +-
 tests/test_cli.py                             |  12 +-
 tests/test_elements.py                        | 400 +++++++++---------
 12 files changed, 448 insertions(+), 576 deletions(-)
 rename capella2polarion/converters/{converter_data_session.py => data_session.py} (80%)

diff --git a/capella2polarion/__main__.py b/capella2polarion/__main__.py
index 2d3eef4a..a40c40f0 100644
--- a/capella2polarion/__main__.py
+++ b/capella2polarion/__main__.py
@@ -11,10 +11,8 @@
 import click
 from capellambse import cli_helpers

-from capella2polarion import data_models
 from capella2polarion import worker as pw
 from capella2polarion.cli import Capella2PolarionCli
-from capella2polarion.converters import element_converter

 logger = logging.getLogger(__name__)

@@ -104,42 +102,29 @@ def synchronize(ctx: click.core.Context) -> None:
         capella_to_polarion_cli.polarion_params.project_id,
     )
     capella_to_polarion_cli.load_synchronize_config()
-    capella_to_polarion_cli.load_roles_from_synchronize_config()
     capella_to_polarion_cli.load_capella_diagramm_cache_index()
-    polarion_worker = pw.PolarionWorker(
-        capella_to_polarion_cli.polarion_params,
-        capella_to_polarion_cli.capella_model,
-        element_converter.resolve_element_type,
-    )
+
     assert (
         capella_to_polarion_cli.capella_diagram_cache_index_content
         is not None
     )
-    polarion_worker.load_elements_and_type_map(
-        capella_to_polarion_cli.synchronize_config_content,
+
+    assert capella_to_polarion_cli.config is not None
+
+    polarion_worker = pw.CapellaPolarionWorker(
+        capella_to_polarion_cli.polarion_params,
+        capella_to_polarion_cli.capella_model,
+        capella_to_polarion_cli.config,
         capella_to_polarion_cli.capella_diagram_cache_index_content,
+        capella_to_polarion_cli.capella_diagram_cache_folder_path,
     )
+    polarion_worker.generate_converter_session()
-    polarion_worker.fill_xtypes()
     polarion_worker.load_polarion_work_item_map()
-    description_references: typing.Any = {}
-    new_work_items: dict[str, data_models.CapellaWorkItem]
-    new_work_items = polarion_worker.create_work_items(
-        capella_to_polarion_cli.capella_diagram_cache_folder_path,
-        capella_to_polarion_cli.capella_model,
-        description_references,
-    )
+    polarion_worker.create_work_items()
     polarion_worker.delete_work_items()
-    polarion_worker.post_work_items(new_work_items)
-    new_work_items = polarion_worker.create_work_items(
-        capella_to_polarion_cli.capella_diagram_cache_folder_path,
-        capella_to_polarion_cli.capella_model,
-        description_references,
-    )
-    polarion_worker.patch_work_items(
-        new_work_items,
-        description_references,
-        capella_to_polarion_cli.synchronize_config_roles,
-    )
+    polarion_worker.post_work_items()
+    polarion_worker.create_work_items()
+    polarion_worker.patch_work_items()


 if __name__ == "__main__":
diff --git a/capella2polarion/cli.py b/capella2polarion/cli.py
index 5597929e..70eb49ac 100644
--- a/capella2polarion/cli.py
+++ b/capella2polarion/cli.py
@@ -7,13 +7,12 @@
 import logging
 import pathlib
 import typing
-from itertools import chain

 import 
capellambse import click -import yaml from capella2polarion import worker as pw +from capella2polarion.converters import converter_config logger = logging.getLogger(__name__) @@ -50,6 +49,7 @@ def __init__( self.synchronize_config_content: dict[str, typing.Any] = {} self.synchronize_config_roles: dict[str, list[str]] | None = None self.echo = click.echo + self.config: converter_config.ConverterConfig | None = None def _none_save_value_string(self, value: str | None) -> str | None: return "None" if value is None else value @@ -93,7 +93,7 @@ def _value(value): self.echo(f"{lighted_member_var}: '{string_value}'") self.echo( f"""Capella Diagram Cache Index-File exits: {('YES' - if self.exits_capella_diagramm_cache_index_file() else 'NO')}""" + if self.exits_capella_diagram_cache_index_file() else 'NO')}""" ) self.echo( f"""Synchronize Config-IO is open: {('YES' @@ -126,95 +126,17 @@ def load_synchronize_config(self) -> None: raise RuntimeError("synchronize config io stream is closed ") if not self.synchronize_config_io.readable(): raise RuntimeError("synchronize config io stream is not readable") - self.synchronize_config_io.seek(0) - self.synchronize_config_content = yaml.safe_load( + self.config = converter_config.ConverterConfig( self.synchronize_config_io ) - def load_roles_from_synchronize_config(self) -> None: - """Fill SynchronizeConfigRoles and correct content.""" - if self.synchronize_config_content is None: - raise RuntimeError("first call loadSynchronizeConfig") - if special_config_asterix := self.synchronize_config_content.pop( - "*", [] - ): - special_config: dict[str, typing.Any] = {} - for typ in special_config_asterix: - if isinstance(typ, str): - special_config[typ] = None - else: - special_config.update(typ) - - lookup: dict[str, dict[str, list[str]]] = {} - for layer, xtypes in self.synchronize_config_content.items(): - for xt in xtypes: - if isinstance(xt, str): - item: dict[str, list[str]] = {xt: []} - else: - item = xt - - lookup.setdefault(layer, {}).update(item) - - new_config: dict[str, typing.Any] = {} - for layer, xtypes in self.synchronize_config_content.items(): - new_entries: list[str | dict[str, typing.Any]] = [] - for xtype in xtypes: - if isinstance(xtype, dict): - for sub_key, sub_value in xtype.items(): - new_value = ( - special_config.get("*", []) - + special_config.get(sub_key, []) - + sub_value - ) - new_entries.append({sub_key: new_value}) - else: - star = special_config.get("*", []) - special_xtype = special_config.get(xtype, []) - if new_value := star + special_xtype: - new_entries.append({xtype: new_value}) - else: - new_entries.append(xtype) - - wildcard_values = special_config.get("*", []) - for key, value in special_config.items(): - if key == "*": - continue - - if isinstance(value, list): - new_value = ( - lookup.get(layer, {}).get(key, []) - + wildcard_values - + value - ) - new_entries.append({key: new_value}) - elif value is None and key not in [ - entry - if isinstance(entry, str) - else list(entry.keys())[0] - for entry in new_entries - ]: - new_entries.append({key: wildcard_values}) - new_config[layer] = new_entries - self.synchronize_config_content = new_config - - roles: dict[str, list[str]] = {} - for typ in chain.from_iterable( - self.synchronize_config_content.values() - ): - if isinstance(typ, dict): - for key, role_ids in typ.items(): - roles[key] = list(role_ids) - else: - roles[typ] = [] - self.synchronize_config_roles = roles - def get_capella_diagram_cache_index_file_path(self) -> pathlib.Path: """Return index file path.""" if 
self.capella_diagram_cache_folder_path is None: raise ValueError("CapellaDiagramCacheFolderPath not filled") return self.capella_diagram_cache_folder_path / "index.json" - def exits_capella_diagramm_cache_index_file(self) -> bool: + def exits_capella_diagram_cache_index_file(self) -> bool: """Test existens of file.""" return ( False @@ -224,7 +146,7 @@ def exits_capella_diagramm_cache_index_file(self) -> bool: def load_capella_diagramm_cache_index(self) -> None: """Load to CapellaDiagramCacheIndexContent.""" - if not self.exits_capella_diagramm_cache_index_file(): + if not self.exits_capella_diagram_cache_index_file(): raise ValueError("capella diagramm cache index file doe not exits") self.capella_diagram_cache_index_content = [] if self.get_capella_diagram_cache_index_file_path() is not None: diff --git a/capella2polarion/converters/converter_config.py b/capella2polarion/converters/converter_config.py index be8f372d..4ff9df94 100644 --- a/capella2polarion/converters/converter_config.py +++ b/capella2polarion/converters/converter_config.py @@ -5,6 +5,7 @@ import dataclasses import typing +from collections import abc as cabc import yaml @@ -31,12 +32,25 @@ def __init__(self, synchronize_config: typing.TextIO): config_dict = yaml.safe_load(synchronize_config) self._layer_configs: dict[str, dict[str, list[CapellaTypeConfig]]] = {} self._global_configs: dict[str, CapellaTypeConfig] = {} + self.polarion_types = set[str]() + self.diagram_config: CapellaTypeConfig | None = None + # We handle the cross layer config separately as global_configs global_config_dict = config_dict.pop("*", {}) all_type_config = global_config_dict.pop("*", {}) global_links = all_type_config.get("links", []) self.__global_config = CapellaTypeConfig(links=global_links) + if "Diagram" in global_config_dict: + diagram_config = global_config_dict.pop("Diagram") or {} + p_type = diagram_config.get("polarion_type") or "diagram" + self.polarion_types.add(p_type) + self.diagram_config = CapellaTypeConfig( + p_type, + diagram_config.get("serializer"), + diagram_config.get("links", []) + global_links, + ) + def _read_capella_type_configs(conf: dict | list | None) -> list[dict]: if conf is None: return [{}] @@ -53,9 +67,12 @@ def _read_capella_type_configs(conf: dict | list | None) -> list[dict]: for c_type, type_config in global_config_dict.items(): type_config = type_config or {} + p_type = type_config.get( + "polarion_type" + ) or _default_type_conversion(c_type) + self.polarion_types.add(p_type) self._global_configs[c_type] = CapellaTypeConfig( - type_config.get("polarion_type") - or _default_type_conversion(c_type), + p_type, type_config.get("serializer"), type_config.get("links", []) + global_links, type_config.get("actor"), @@ -63,6 +80,7 @@ def _read_capella_type_configs(conf: dict | list | None) -> list[dict]: ) for layer, type_configs in config_dict.items(): + type_configs = type_configs or {} self._layer_configs[layer] = {} for c_type, c_type_config in type_configs.items(): type_configs = _read_capella_type_configs(c_type_config) @@ -77,11 +95,15 @@ def _read_capella_type_configs(conf: dict | list | None) -> list[dict]: ) or self.__global_config ) + p_type = ( + type_config.get("polarion_type") + or closest_config.p_type + or _default_type_conversion(c_type) + ) + self.polarion_types.add(p_type) self._layer_configs[layer][c_type].append( CapellaTypeConfig( - type_config.get("polarion_type") - or closest_config.p_type - or _default_type_conversion(c_type), + p_type, type_config.get("serializer") or 
closest_config.converter, type_config.get("links", []) @@ -99,6 +121,8 @@ def get_type_config( nature: str | None = None, ) -> CapellaTypeConfig | None: """Get the type config for a given layer and capella_type.""" + if layer not in self._layer_configs: + return None layer_configs = self._layer_configs.get(layer, {}).get(c_type) global_config = self._global_configs.get(c_type) if layer_configs: @@ -155,3 +179,12 @@ def __contains__( """Check if there is a config for a given layer and Capella type.""" layer, c_type, actor, nature = item return self.get_type_config(layer, c_type, actor, nature) is not None + + def layers_and_types(self) -> cabc.Iterator[tuple[str, str]]: + """Iterate all layers and types of the config.""" + for layer, layer_types in self._layer_configs.items(): + for c_type in layer_types: + yield layer, c_type + for c_type in self._global_configs: + if c_type not in layer_types: + yield layer, c_type diff --git a/capella2polarion/converters/converter_data_session.py b/capella2polarion/converters/data_session.py similarity index 80% rename from capella2polarion/converters/converter_data_session.py rename to capella2polarion/converters/data_session.py index 81f73ac2..46ef2914 100644 --- a/capella2polarion/converters/converter_data_session.py +++ b/capella2polarion/converters/data_session.py @@ -5,7 +5,7 @@ import dataclasses -from capellambse.model import GenericElement +from capellambse.model import common, diagram from capella2polarion import data_models as dm from capella2polarion.converters import converter_config @@ -17,9 +17,9 @@ class ConverterData: layer: str type_config: converter_config.CapellaTypeConfig - capella_element: GenericElement | None = None + capella_element: diagram.Diagram | common.GenericElement work_item: dm.CapellaWorkItem | None = None description_references: list[str] = dataclasses.field(default_factory=list) -ConverterSession: dict[str, ConverterData] +ConverterSession = dict[str, ConverterData] diff --git a/capella2polarion/converters/element_converter.py b/capella2polarion/converters/element_converter.py index 874d48a7..327cac43 100644 --- a/capella2polarion/converters/element_converter.py +++ b/capella2polarion/converters/element_converter.py @@ -10,18 +10,19 @@ import pathlib import re import typing as t +from collections import abc as cabc import capellambse import markupsafe from capellambse import helpers as chelpers from capellambse.model import common -from capellambse.model import diagram as diagr -from capellambse.model.crosslayer import capellacore, cs, interaction -from capellambse.model.layers import oa, pa +from capellambse.model.crosslayer import interaction +from capellambse.model.layers import oa from lxml import etree from capella2polarion import data_models from capella2polarion.connectors import polarion_repo +from capella2polarion.converters import data_session RE_DESCR_LINK_PATTERN = re.compile( r"([^<]+)<\/a>" @@ -37,19 +38,6 @@ "" ) -SERIALIZERS: dict[str, str] = { - "CapabilityRealization": "include_pre_and_post_condition", - "Capability": "include_pre_and_post_condition", - "LogicalComponent": "include_actor_in_type", - "OperationalCapability": "include_pre_and_post_condition", - "PhysicalComponent": "include_nature_in_type", - "SystemComponent": "include_actor_in_type", - "Scenario": "include_pre_and_post_condition", - "Constraint": "linked_text_as_description", - "SystemCapability": "include_pre_and_post_condition", -} - - PrePostConditionElement = t.Union[ oa.OperationalCapability, interaction.Scenario ] @@ -135,51 
+123,50 @@ class CapellaWorkItemSerializer:
     """The general serializer class for CapellaWorkItems."""

     diagram_cache_path: pathlib.Path
-    polarion_type_map: dict[str, str]
-    capella_polarion_mapping: polarion_repo.PolarionDataRepository
     model: capellambse.MelodyModel
-    descr_references: dict[str, list[str]]
-    serializer_mapping: dict[str, str]

     def __init__(
         self,
         diagram_cache_path: pathlib.Path,
-        polarion_type_map: dict[str, str],
         model: capellambse.MelodyModel,
         capella_polarion_mapping: polarion_repo.PolarionDataRepository,
-        descr_references: dict[str, list[str]],
-        serializer_mapping: dict[str, str] | None = None,
+        converter_session: data_session.ConverterSession,
     ):
         self.diagram_cache_path = diagram_cache_path
-        self.polarion_type_map = polarion_type_map
         self.model = model
         self.capella_polarion_mapping = capella_polarion_mapping
-        self.descr_references = descr_references
-        self.serializer_mapping = serializer_mapping or SERIALIZERS
+        self.converter_session = converter_session
+
+    def serialize_all(self):
+        """Serialize all items of the converter_session."""
+        work_items = [self.serialize(uuid) for uuid in self.converter_session]
+        return list(filter(None, work_items))

     def serialize(
-        self, obj: diagr.Diagram | common.GenericElement
+        self,
+        uuid: str,
     ) -> data_models.CapellaWorkItem | None:
         """Return a CapellaWorkItem for the given diagram or element."""
+        converter_data = self.converter_session[uuid]
         try:
-            if isinstance(obj, diagr.Diagram):
-                return self._diagram(obj)
-            else:
-                xtype = self.polarion_type_map.get(
-                    obj.uuid, type(obj).__name__
-                )
-                serializer = getattr(
-                    self,
-                    f"_{self.serializer_mapping.get(xtype)}",
-                    self._generic_work_item,
-                )
-                return serializer(obj)
+            serializer: cabc.Callable[
+                [data_session.ConverterData], data_models.CapellaWorkItem
+            ] = getattr(
+                self,
+                f"_{converter_data.type_config.converter}",
+                self._generic_work_item,
+            )
+            converter_data.work_item = serializer(converter_data)
+            return converter_data.work_item
         except Exception as error:
             logger.error("Serializing model element failed. %s", error.args[0])
             return None

-    def _diagram(self, diag: diagr.Diagram) -> data_models.CapellaWorkItem:
+    def _diagram(
+        self, converter_data: data_session.ConverterData
+    ) -> data_models.CapellaWorkItem:
         """Serialize a diagram for Polarion."""
+        diag = converter_data.capella_element
         diagram_path = self.diagram_cache_path / f"{diag.uuid}.svg"
         src = _decode_diagram(diagram_path)
         style = "; ".join(
@@ -189,7 +176,7 @@ def _diagram(self, diag: diagr.Diagram) -> data_models.CapellaWorkItem:
             f'

' ) return data_models.CapellaWorkItem( - type="diagram", + type=converter_data.type_config.p_type, title=diag.name, description_type="text/html", description=description, @@ -198,15 +185,15 @@ def _diagram(self, diag: diagr.Diagram) -> data_models.CapellaWorkItem: ) def _generic_work_item( - self, obj: common.GenericElement + self, converter_data: data_session.ConverterData ) -> data_models.CapellaWorkItem: - xtype = self.polarion_type_map.get(obj.uuid, type(obj).__name__) + obj = converter_data.capella_element raw_description = getattr(obj, "description", markupsafe.Markup("")) uuids, value = self._sanitize_description(obj, raw_description) - self.descr_references[obj.uuid] = uuids + converter_data.description_references = uuids requirement_types = _get_requirement_types_text(obj) return data_models.CapellaWorkItem( - type=resolve_element_type(xtype), + type=converter_data.type_config.p_type, title=obj.name, description_type="text/html", description=value, @@ -276,9 +263,12 @@ def _replace_markup( return match.group(default_group) def _include_pre_and_post_condition( - self, obj: PrePostConditionElement + self, converter_data: data_session.ConverterData ) -> data_models.CapellaWorkItem: """Return generic attributes and pre- and post-condition.""" + obj = converter_data.capella_element + assert hasattr(obj, "precondition"), "Missing PreCondition Attribute" + assert hasattr(obj, "postcondition"), "Missing PostCondition Attribute" def get_condition(cap: PrePostConditionElement, name: str) -> str: if not (condition := getattr(cap, name)): @@ -288,7 +278,7 @@ def get_condition(cap: PrePostConditionElement, name: str) -> str: def matcher(match: re.Match) -> str: return strike_through(self._replace_markup(match, [])) - work_item = self._generic_work_item(obj) + work_item = self._generic_work_item(converter_data) pre_condition = RE_DESCR_DELETED_PATTERN.sub( matcher, get_condition(obj, "precondition") ) @@ -302,44 +292,21 @@ def matcher(match: re.Match) -> str: return work_item def _get_linked_text( - self, obj: capellacore.Constraint + self, converter_data: data_session.ConverterData ) -> markupsafe.Markup: """Return sanitized markup of the given ``obj`` linked text.""" + obj = converter_data.capella_element description = obj.specification["capella:linkedText"].striptags() uuids, value = self._sanitize_description(obj, description) if uuids: - self.descr_references[obj.uuid] = uuids + converter_data.description_references = uuids return value def _linked_text_as_description( - self, obj: capellacore.Constraint + self, converter_data: data_session.ConverterData ) -> data_models.CapellaWorkItem: """Return attributes for a ``Constraint``.""" - work_item = self._generic_work_item(obj) - # pylint: disable-next=attribute-defined-outside-init - work_item.description = self._get_linked_text(obj) - return work_item - - def _include_actor_in_type( - self, obj: cs.Component - ) -> data_models.CapellaWorkItem: - """Return attributes for a ``Component``.""" - work_item = self._generic_work_item(obj) - if obj.is_actor: - xtype = RE_CAMEL_CASE_2ND_WORD_PATTERN.sub( - r"\1Actor", type(obj).__name__ - ) - # pylint: disable-next=attribute-defined-outside-init - work_item.type = resolve_element_type(xtype) - return work_item - - def _include_nature_in_type( - self, obj: pa.PhysicalComponent - ) -> data_models.CapellaWorkItem: - """Return attributes for a ``PhysicalComponent``.""" - work_item = self._include_actor_in_type(obj) - xtype = work_item.type - nature = [obj.nature.name, ""][obj.nature == "UNSET"] + 
work_item = self._generic_work_item(converter_data) # pylint: disable-next=attribute-defined-outside-init - work_item.type = f"{xtype}{nature.capitalize()}" + work_item.description = self._get_linked_text(converter_data) return work_item diff --git a/capella2polarion/converters/link_converter.py b/capella2polarion/converters/link_converter.py index 99eaccd1..89e1b745 100644 --- a/capella2polarion/converters/link_converter.py +++ b/capella2polarion/converters/link_converter.py @@ -17,7 +17,7 @@ from capella2polarion import data_models from capella2polarion.connectors import polarion_repo -from capella2polarion.converters import element_converter +from capella2polarion.converters import data_session, element_converter logger = logging.getLogger(__name__) @@ -30,14 +30,12 @@ class LinkSerializer: def __init__( self, capella_polarion_mapping: polarion_repo.PolarionDataRepository, - new_work_items: dict[str, data_models.CapellaWorkItem], - description_references: dict[str, list[str]], + converter_session: data_session.ConverterSession, project_id: str, model: capellambse.MelodyModel, ): self.capella_polarion_mapping = capella_polarion_mapping - self.new_work_items = new_work_items - self.description_references = description_references + self.converter_session = converter_session self.project_id = project_id self.model = model @@ -59,20 +57,20 @@ def __init__( } def create_links_for_work_item( - self, - obj: common.GenericElement | diag.Diagram, - roles, + self, uuid: str ) -> list[polarion_api.WorkItemLink]: """Create work item links for a given Capella object.""" + converter_data = self.converter_session[uuid] + obj = converter_data.capella_element if isinstance(obj, diag.Diagram): repres = f"" else: repres = obj._short_repr_() - work_item = self.new_work_items[obj.uuid] + work_item = converter_data.work_item + assert work_item is not None new_links: list[polarion_api.WorkItemLink] = [] - typ = work_item.type[0].upper() + work_item.type[1:] - for role_id in roles.get(typ, []): + for role_id in converter_data.type_config.links: if serializer := self.serializers.get(role_id): new_links.extend(serializer(obj, work_item.id, role_id, {})) else: @@ -123,7 +121,7 @@ def _handle_description_reference_links( role_id: str, links: dict[str, polarion_api.WorkItemLink], ) -> list[polarion_api.WorkItemLink]: - refs = self.description_references.get(obj.uuid, []) + refs = self.converter_session[obj.uuid].description_references ref_set = set(self._get_work_item_ids(work_item_id, refs, role_id)) return self._create(work_item_id, role_id, ref_set, links) diff --git a/capella2polarion/worker.py b/capella2polarion/worker.py index 29abee83..14a2dd9a 100644 --- a/capella2polarion/worker.py +++ b/capella2polarion/worker.py @@ -7,44 +7,23 @@ import logging import pathlib import typing -from itertools import chain +from typing import Optional from urllib import parse import capellambse import polarion_rest_api_client as polarion_api -from capellambse.model import common from capella2polarion import data_models from capella2polarion.connectors import polarion_repo -from capella2polarion.converters import element_converter, link_converter +from capella2polarion.converters import ( + converter_config, + data_session, + element_converter, + link_converter, +) logger = logging.getLogger(__name__) -# STATUS_DELETE = "deleted" -ACTOR_TYPES = { - "LogicalActor": "LogicalComponent", - "SystemActor": "SystemComponent", - "PhysicalActor": "PhysicalComponent", -} -PHYSICAL_COMPONENT_TYPES = { - "PhysicalComponentNode": 
"PhysicalComponent", - "PhysicalActorNode": "PhysicalComponent", - "PhysicalComponentBehavior": "PhysicalComponent", - "PhysicalActorBehavior": "PhysicalComponent", -} -POL2CAPELLA_TYPES: dict[str, str] = ( - { - "OperationalEntity": "Entity", - "OperationalInteraction": "FunctionalExchange", - "SystemCapability": "Capability", - } - | ACTOR_TYPES - | PHYSICAL_COMPONENT_TYPES -) -TYPES_POL2CAPELLA = { - ctype: ptype for ptype, ctype in POL2CAPELLA_TYPES.items() -} - class PolarionWorkerParams: """Container for Polarion Params.""" @@ -58,23 +37,25 @@ def __init__( self.delete_work_items = delete_work_items -class PolarionWorker: - """PolarionWorker encapsulate the Polarion API Client work.""" +class CapellaPolarionWorker: + """CapellaPolarionWorker encapsulate the Polarion API Client work.""" def __init__( self, params: PolarionWorkerParams, model: capellambse.MelodyModel, - make_type_id: typing.Any, + config: converter_config.ConverterConfig, + diagram_idx: list[dict[str, typing.Any]], + diagram_cache_path: pathlib.Path, ) -> None: - self.polarion_params: PolarionWorkerParams = params - self.elements: dict[str, list[common.GenericElement]] = {} - self.polarion_type_map: dict[str, str] = {} # TODO refactor - self.capella_uuids: set[str] = set() # TODO refactor - self.x_types: set[str] = set() + self.polarion_params = params self.polarion_data_repo = polarion_repo.PolarionDataRepository() + self.converter_session: data_session.ConverterSession = {} self.model = model - self.make_type_id: typing.Any = make_type_id + self.config = config + self.diagram_idx = diagram_idx + self.diagram_cache_path = diagram_cache_path + if (self.polarion_params.project_id is None) or ( len(self.polarion_params.project_id) == 0 ): @@ -108,93 +89,58 @@ def _save_value_string(self, value: str | None) -> str | None: return "None" if value is None else value def check_client(self) -> None: - """Instantiate the polarion client, move to PolarionWorker Class.""" - + """Instantiate the polarion client as member.""" if not self.client.project_exists(): raise KeyError( f"Miss Polarion project with id " f"{self._save_value_string(self.polarion_params.project_id)}" ) - def load_elements_and_type_map( + def generate_converter_session( self, - config: dict[str, typing.Any], - diagram_idx: list[dict[str, typing.Any]], ) -> None: """Return an elements and UUID to Polarion type map.""" - convert_type = POL2CAPELLA_TYPES - type_map: dict[str, str] = {} - elements: dict[str, list[common.GenericElement]] = {} - for _below, pol_types in config.items(): - below = getattr(self.model, _below) - for typ in pol_types: - if isinstance(typ, dict): - typ = list(typ.keys())[0] - - if typ == "Diagram": - continue - - xtype = convert_type.get(typ, typ) - objects = self.model.search(xtype, below=below) - elements.setdefault(typ, []).extend(objects) - for obj in objects: - type_map[obj.uuid] = typ - - for typ, xtype in ACTOR_TYPES.items(): - if typ not in elements: + missing_types = set[tuple[str, str, Optional[bool], Optional[str]]]() + for layer, c_type in self.config.layers_and_types(): + below = getattr(self.model, layer) + if c_type == "Diagram": continue - actors: list[common.GenericElement] = [] - components: list[common.GenericElement] = [] - for obj in elements[typ]: - if obj.is_actor: - actors.append(obj) + objects = self.model.search(c_type, below=below) + for obj in objects: + actor = None if not hasattr(obj, "is_actor") else obj.is_actor + nature = None if not hasattr(obj, "nature") else obj.nature + if config := 
self.config.get_type_config(
+                    layer, c_type, actor, nature
+                ):
+                    self.converter_session[
+                        obj.uuid
+                    ] = data_session.ConverterData(layer, config, obj)
                 else:
-                    components.append(obj)
-                    type_map[obj.uuid] = xtype
-
-            elements[typ] = actors
-            elements[xtype] = components
-
-        nature_mapping: dict[str, tuple[list[common.GenericElement], str]] = {
-            "UNSET": ([], "PhysicalComponent"),
-            "NODE": ([], "PhysicalComponentNode"),
-            "BEHAVIOR": ([], "PhysicalComponentBehavior"),
-            "NODE_actor": ([], "PhysicalActorNode"),
-            "BEHAVIOR_actor": ([], "PhysicalActorBehavior"),
-        }
-        for obj in elements.get("PhysicalComponent", []):
-            postfix = "_actor" if obj.is_actor else ""
-            container, xtype = nature_mapping[f"{str(obj.nature)}{postfix}"]
-            container.append(obj)
-            type_map[obj.uuid] = xtype
-
-        for container, xtype in nature_mapping.values():
-            if container:
-                elements[xtype] = container
-
-        diagrams_from_cache = {d["uuid"] for d in diagram_idx if d["success"]}
-        elements["Diagram"] = [
-            d for d in self.model.diagrams if d.uuid in diagrams_from_cache
-        ]
-        for obj in elements["Diagram"]:
-            type_map[obj.uuid] = "Diagram"
-        self.elements = elements
-        self.polarion_type_map = type_map
-        self.capella_uuids = set(self.polarion_type_map)
-
-    def fill_xtypes(self):
-        """Return a set of Polarion types from the current context."""
-        xtypes = set[str]()
-        for obj in chain.from_iterable(self.elements.values()):
-            xtype = self.polarion_type_map.get(obj.uuid, type(obj).__name__)
-            xtypes.add(self.make_type_id(xtype))
-        self.x_types = xtypes
+                    missing_types.add((layer, c_type, actor, nature))
+
+        if self.config.diagram_config:
+            diagrams_from_cache = {
+                d["uuid"] for d in self.diagram_idx if d["success"]
+            }
+            for d in self.model.diagrams:
+                if d.uuid in diagrams_from_cache:
+                    self.converter_session[
+                        d.uuid
+                    ] = data_session.ConverterData(
+                        "", self.config.diagram_config, d
+                    )
+
+        if missing_types:
+            for missing_type in missing_types:
+                logger.warning(
+                    "In layer %r, Capella type %r is configured, but not"
+                    " for actor %r and nature %r.",
+                    *missing_type,
+                )

     def load_polarion_work_item_map(self):
         """Return a map from Capella UUIDs to Polarion work items."""
-        work_item_types = list(map(self.make_type_id, self.x_types))
-        _type = " ".join(work_item_types)
+        _type = " ".join(self.config.polarion_types)

         work_items = self.client.get_all_work_items(
             f"type:({_type})",
@@ -205,28 +151,16 @@ def create_work_items(
         self,
-        diagram_cache_path: pathlib.Path,
-        model,
-        descr_references: dict[str, list[str]],
     ) -> dict[str, data_models.CapellaWorkItem]:
         """Create a list of work items for Polarion."""
-        objects = chain.from_iterable(self.elements.values())
-        _work_items = []
         serializer = element_converter.CapellaWorkItemSerializer(
-            diagram_cache_path,
-            self.polarion_type_map,
-            model,
+            self.diagram_cache_path,
+            self.model,
             self.polarion_data_repo,
-            descr_references,
+            self.converter_session,
         )
-        for obj in objects:
-            _work_items.append(serializer.serialize(obj))
-
-        _work_items = list(filter(None, _work_items))
-        valid_types = set(map(self.make_type_id, set(self.elements)))
-        work_items: list[data_models.CapellaWorkItem] = []
-        missing_types: set[str] = set()
-        for work_item in _work_items:
+        work_items = serializer.serialize_all()
+        for work_item in work_items:
             assert work_item is not None
             assert work_item.title is not None
             assert work_item.type is not None
             if old := self.polarion_data_repo.get_work_item_by_capella_uuid(
                 work_item.uuid_capella
             ):
                 work_item.id = old.id
-                if work_item.type in 
valid_types: - work_items.append(work_item) - else: - missing_types.add(work_item.type) - if missing_types: - logger.debug( - "%r are missing in the capella2polarion configuration", - ", ".join(missing_types), - ) return {wi.uuid_capella: wi for wi in work_items} def delete_work_items(self) -> None: @@ -263,7 +188,7 @@ def serialize_for_delete(uuid: str) -> str: for uuid, _, work_item in self.polarion_data_repo.items() if work_item.status != "deleted" } - uuids: set[str] = existing_work_items - self.capella_uuids + uuids: set[str] = existing_work_items - set(self.converter_session) work_item_ids = [serialize_for_delete(uuid) for uuid in uuids] if work_item_ids: try: @@ -275,11 +200,19 @@ def serialize_for_delete(uuid: str) -> str: logger.error("Deleting work items failed. %s", error.args[0]) def post_work_items( - self, new_work_items: dict[str, data_models.CapellaWorkItem] + self, ) -> None: """Post work items in a Polarion project.""" missing_work_items: list[data_models.CapellaWorkItem] = [] - for work_item in new_work_items.values(): + for uuid, converter_data in self.converter_session.items(): + work_item = converter_data.work_item + if work_item is None: + logger.warning( + "Expected to find a WorkItem for %s, but there is none", + uuid, + ) + continue + if work_item.uuid_capella in self.polarion_data_repo: continue @@ -323,7 +256,7 @@ def patch_work_item( assert new.id is not None try: self.client.update_work_item(new) - if delete_link_ids := PolarionWorker.get_missing_link_ids( + if delete_link_ids := CapellaPolarionWorker.get_missing_link_ids( old.linked_work_items, new.linked_work_items ): id_list_str = ", ".join(delete_link_ids.keys()) @@ -337,7 +270,7 @@ def patch_work_item( list(delete_link_ids.values()) ) - if create_links := PolarionWorker.get_missing_link_ids( + if create_links := CapellaPolarionWorker.get_missing_link_ids( new.linked_work_items, old.linked_work_items ): id_list_str = ", ".join(create_links.keys()) @@ -363,10 +296,10 @@ def get_missing_link_ids( ) -> dict[str, polarion_api.WorkItemLink]: """Return an ID-Link dict of links present in left and not in right.""" left_id_map = { - PolarionWorker._get_link_id(link): link for link in left + CapellaPolarionWorker._get_link_id(link): link for link in left } right_id_map = { - PolarionWorker._get_link_id(link): link for link in right + CapellaPolarionWorker._get_link_id(link): link for link in right } return { lid: left_id_map[lid] @@ -386,44 +319,47 @@ def _get_link_id(link: polarion_api.WorkItemLink) -> str: def patch_work_items( self, - new_work_items: dict[str, data_models.CapellaWorkItem], - descr_references, - link_roles, ) -> None: """Update work items in a Polarion project.""" back_links: dict[str, list[polarion_api.WorkItemLink]] = {} link_serializer = link_converter.LinkSerializer( self.polarion_data_repo, - new_work_items, - descr_references, + self.converter_session, self.polarion_params.project_id, self.model, ) - for uuid in new_work_items: - objects = self.model - if uuid.startswith("_"): - objects = self.model.diagrams - obj = objects.by_uuid(uuid) + for uuid, converter_data in self.converter_session.items(): + if converter_data.work_item is None: + logger.warning( + "Expected to find a WorkItem for %s, but there is none", + uuid, + ) + continue + + links = link_serializer.create_links_for_work_item(uuid) + converter_data.work_item.linked_work_items = links - links = link_serializer.create_links_for_work_item( - obj, - link_roles, + link_converter.create_grouped_link_fields( + 
converter_data.work_item, back_links ) - work_item: data_models.CapellaWorkItem = new_work_items[uuid] - work_item.linked_work_items = links - link_converter.create_grouped_link_fields(work_item, back_links) + for uuid, converter_data in self.converter_session.items(): + if converter_data.work_item is None: + logger.warning( + "Expected to find a WorkItem for %s, but there is none", + uuid, + ) + continue - for uuid, new_work_item in new_work_items.items(): _, old_work_item = self.polarion_data_repo[uuid] if old_work_item.id in back_links: link_converter.create_grouped_back_link_fields( - new_work_item, back_links[old_work_item.id] + converter_data.work_item, back_links[old_work_item.id] ) self.patch_work_item( - new_work_item, + converter_data.work_item, old_work_item, ) diff --git a/tests/__init__.py b/tests/__init__.py index 5d62d4d0..dd5d085d 100644 --- a/tests/__init__.py +++ b/tests/__init__.py @@ -1,5 +1,2 @@ # Copyright DB InfraGO AG and contributors # SPDX-License-Identifier: Apache-2.0 - -# Copyright DB Netz AG and contributors -# SPDX-License-Identifier: Apache-2.0 diff --git a/tests/conftest.py b/tests/conftest.py index cbf1ea89..730b55de 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -17,7 +17,7 @@ TEST_DATA_ROOT = pathlib.Path(__file__).parent / "data" TEST_DIAGRAM_CACHE = TEST_DATA_ROOT / "diagram_cache" TEST_MODEL_ELEMENTS = TEST_DATA_ROOT / "model_elements" -TEST_MODEL_ELEMENTS_CONFIG = TEST_MODEL_ELEMENTS / "config.yaml" +TEST_MODEL_ELEMENTS_CONFIG = TEST_MODEL_ELEMENTS / "new_config.yaml" TEST_MODEL = TEST_DATA_ROOT / "model" / "Melody Model Test.aird" TEST_HOST = "https://api.example.com" diff --git a/tests/data/model_elements/new_config.yaml b/tests/data/model_elements/new_config.yaml index 4064bc24..a0e25a2b 100644 --- a/tests/data/model_elements/new_config.yaml +++ b/tests/data/model_elements/new_config.yaml @@ -4,8 +4,8 @@ "*": # All layers "*": # All class types links: - - parent # Specify workitem links - - description_reference # Custom attribute + - parent + - description_reference Class: links: - state_machines @@ -13,6 +13,12 @@ links: - diagram_elements Constraint: + serializer: linked_text_as_description + Scenario: + serializer: include_pre_and_post_condition + CapabilityRealization: + serializer: include_pre_and_post_condition + Entity: oa: # Specify below FunctionalExchange: @@ -25,16 +31,22 @@ pa: PhysicalComponent: - actor: false nature: null - polarion_type: PhysicalComponent + polarion_type: physicalComponent - actor: false nature: NODE - polarion_type: PhysicalComponentNode + polarion_type: physicalComponentNode - actor: false nature: BEHAVIOR - polarion_type: PhysicalComponentBehavior + polarion_type: physicalComponentBehavior - actor: true nature: NODE - polarion_type: PhysicalActorNode + polarion_type: physicalActorNode - actor: true nature: BEHAVIOR - polarion_type: PhysicalActorBehavior + polarion_type: physicalActorBehavior +la: + LogicalComponent: + - actor: true + polarion_type: logicalActor + - actor: false + polarion_type: logicalComponent diff --git a/tests/test_cli.py b/tests/test_cli.py index 580d7a42..5a4985ea 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -10,7 +10,7 @@ from click import testing import capella2polarion.__main__ as main -from capella2polarion.worker import PolarionWorker +from capella2polarion.worker import CapellaPolarionWorker # pylint: disable-next=relative-beyond-top-level, useless-suppression from tests.conftest import ( # type: ignore[import] @@ -25,19 +25,21 @@ def 
test_migrate_model_elements(monkeypatch: pytest.MonkeyPatch): monkeypatch.setattr(polarion_api, "OpenAPIPolarionProjectClient", mock_api) mock_get_polarion_wi_map = mock.MagicMock() monkeypatch.setattr( - PolarionWorker, "load_polarion_work_item_map", mock_get_polarion_wi_map + CapellaPolarionWorker, + "load_polarion_work_item_map", + mock_get_polarion_wi_map, ) mock_delete_work_items = mock.MagicMock() monkeypatch.setattr( - PolarionWorker, "delete_work_items", mock_delete_work_items + CapellaPolarionWorker, "delete_work_items", mock_delete_work_items ) mock_post_work_items = mock.MagicMock() monkeypatch.setattr( - PolarionWorker, "post_work_items", mock_post_work_items + CapellaPolarionWorker, "post_work_items", mock_post_work_items ) mock_patch_work_items = mock.MagicMock() monkeypatch.setattr( - PolarionWorker, "patch_work_items", mock_patch_work_items + CapellaPolarionWorker, "patch_work_items", mock_patch_work_items ) command: list[str] = [ diff --git a/tests/test_elements.py b/tests/test_elements.py index c497eef3..bb75d9a7 100644 --- a/tests/test_elements.py +++ b/tests/test_elements.py @@ -17,8 +17,14 @@ from capella2polarion import data_models from capella2polarion.cli import Capella2PolarionCli from capella2polarion.connectors import polarion_repo -from capella2polarion.converters import element_converter, link_converter -from capella2polarion.worker import PolarionWorker +from capella2polarion.converters import ( + converter_config, + data_session, + element_converter, + link_converter, +) +from capella2polarion.worker import CapellaPolarionWorker +from tests import conftest # pylint: disable-next=relative-beyond-top-level, useless-suppression from tests.conftest import ( # type: ignore[import] @@ -47,11 +53,6 @@ TEST_SCENARIO = "afdaa095-e2cd-4230-b5d3-6cb771a90f51" TEST_CAP_REAL = "b80b3141-a7fc-48c7-84b2-1467dcef5fce" TEST_CONSTRAINT = "95cbd4af-7224-43fe-98cb-f13dda540b8e" -TEST_POL_TYPE_MAP = { - TEST_ELEMENT_UUID: "LogicalComponent", - TEST_OCAP_UUID: "OperationalCapability", - TEST_WE_UUID: "Entity", -} TEST_DIAG_DESCR = ( '

" ) } +DIAGRAM_CONFIG = converter_config.CapellaTypeConfig("diagram", "diagram") class BaseObjectContainer: def __init__( self, cli: Capella2PolarionCli, - pw: PolarionWorker, - new_work_items: dict[str, data_models.CapellaWorkItem], + pw: CapellaPolarionWorker, ) -> None: self.c2pcli: Capella2PolarionCli = cli - self.pw: PolarionWorker = pw - self.new_work_items = new_work_items + self.pw: CapellaPolarionWorker = pw class TestDiagramElements: @@ -143,7 +143,7 @@ class TestDiagramElements: @pytest.fixture def base_object( diagram_cache_index: list[dict[str, typing.Any]], - model: capellambse.MelodyModel | None, + model: capellambse.MelodyModel, monkeypatch: pytest.MonkeyPatch, ) -> BaseObjectContainer: import io @@ -173,40 +173,29 @@ def write(self, text: str): monkeypatch.setattr( polarion_api, "OpenAPIPolarionProjectClient", mock_api ) - pw = PolarionWorker( + c2p_cli.config = mock.Mock(converter_config.ConverterConfig) + pw = CapellaPolarionWorker( c2p_cli.polarion_params, model, - element_converter.resolve_element_type, + c2p_cli.config, + c2p_cli.capella_diagram_cache_index_content, + c2p_cli.capella_diagram_cache_folder_path, ) - pw.capella_uuids = {d["uuid"] for d in diagram_cache_index} + pw.converter_session = { + TEST_DIAG_UUID: data_session.ConverterData( + "", DIAGRAM_CONFIG, model.diagrams.by_uuid(TEST_DIAG_UUID) + ) + } pw.polarion_data_repo = polarion_repo.PolarionDataRepository( [work_item] ) - pw.elements = {"Diagram": c2p_cli.capella_model.diagrams} - return BaseObjectContainer( - c2p_cli, - pw, - { - uuid: data_models.CapellaWorkItem( - id="Diag-1", - checksum="123", - uuid_capella=uuid, - type="fakeModelObject", - ) - }, - ) + return BaseObjectContainer(c2p_cli, pw) @staticmethod def test_create_diagrams(base_object: BaseObjectContainer): - c2p_cli = base_object.c2pcli pw = base_object.pw - description_reference: dict[str, list[str]] = {} new_work_items: dict[str, data_models.CapellaWorkItem] - new_work_items = pw.create_work_items( - c2p_cli.capella_diagram_cache_folder_path, - c2p_cli.capella_model, - description_reference, - ) + new_work_items = pw.create_work_items() assert len(new_work_items) == 1 work_item = new_work_items[TEST_DIAG_UUID] assert isinstance(work_item, data_models.CapellaWorkItem) @@ -220,20 +209,17 @@ def test_create_diagrams(base_object: BaseObjectContainer): def test_create_diagrams_filters_non_diagram_elements( base_object: BaseObjectContainer, ): - c2p_cli = base_object.c2pcli + # This test does not make any sense, but it also didn't before pw = base_object.pw - description_reference: dict[str, list[str]] = {} - pw.create_work_items( - c2p_cli.capella_diagram_cache_folder_path, - c2p_cli.capella_model, - description_reference, - ) + pw.create_work_items() assert pw.client.create_work_items.call_count == 0 @staticmethod def test_delete_diagrams(base_object: BaseObjectContainer): pw = base_object.pw - pw.capella_uuids = set() + pw.converter_session = {} + pw.create_work_items() + pw.post_work_items() pw.delete_work_items() assert pw.client is not None assert pw.client.delete_work_items.call_count == 1 @@ -293,7 +279,7 @@ def write(self, text: str): capella_model=model, synchronize_config_io=MyIO(), ) - c2p_cli.synchronize_config_roles = {"FakeModelObject": ["attribute"]} + c2p_cli.setup_logger() mock_api = mock.MagicMock( spec=polarion_api.OpenAPIPolarionProjectClient @@ -301,48 +287,47 @@ def write(self, text: str): monkeypatch.setattr( polarion_api, "OpenAPIPolarionProjectClient", mock_api ) - pw = PolarionWorker( + c2p_cli.config = 
mock.Mock(converter_config.ConverterConfig) + pw = CapellaPolarionWorker( c2p_cli.polarion_params, model, - element_converter.resolve_element_type, + c2p_cli.config, + c2p_cli.capella_diagram_cache_index_content, + c2p_cli.capella_diagram_cache_folder_path, ) pw.polarion_data_repo = polarion_repo.PolarionDataRepository( [work_item] ) - pw.polarion_type_map = {"uuid1": "FakeModelObject"} fake = FakeModelObject("uuid1", name="Fake 1") - pw.elements = { - "FakeModelObject": [ + fake_model_type_config = converter_config.CapellaTypeConfig( + "fakeModelObject", links=["attribute"] + ) + pw.converter_session = { + "uuid1": data_session.ConverterData( + "oa", + fake_model_type_config, fake, - FakeModelObject("uuid2", name="Fake 2", attribute=fake), - ], - "UnsupportedFakeModelObject": [ - UnsupportedFakeModelObject("uuid3") - ], - } - return BaseObjectContainer( - c2p_cli, - pw, - { - "uuid1": data_models.CapellaWorkItem( + data_models.CapellaWorkItem( id="Obj-1", uuid_capella="uuid1", status="open", checksum="123", type="fakeModelObject", - ) - }, - ) + ), + ), + "uuid2": data_session.ConverterData( + "oa", + fake_model_type_config, + FakeModelObject("uuid2", name="Fake 2", attribute=fake), + ), + } + return BaseObjectContainer(c2p_cli, pw) @staticmethod def test_create_work_items( monkeypatch: pytest.MonkeyPatch, base_object: BaseObjectContainer ): - del base_object.pw.elements["UnsupportedFakeModelObject"] base_object.c2pcli.capella_model = mock.MagicMock() - base_object.c2pcli.capella_model.by_uuid.side_effect = ( - base_object.pw.elements["FakeModelObject"] - ) monkeypatch.setattr( element_converter.CapellaWorkItemSerializer, "serialize", @@ -364,11 +349,7 @@ def test_create_work_items( description=markupsafe.Markup(""), ), ] - work_items = base_object.pw.create_work_items( - base_object.c2pcli.capella_diagram_cache_folder_path, - base_object.c2pcli.capella_model, - {}, - ) + work_items = base_object.pw.create_work_items() assert list(work_items.values()) == [expected, expected1] @staticmethod @@ -390,8 +371,15 @@ def test_create_work_items_with_special_polarion_type( _type: str, attrs: dict[str, typing.Any], ): - base_object.pw.elements = {_type: [model.by_uuid(uuid)]} - base_object.pw.polarion_type_map[uuid] = _type + base_object.pw.converter_session = { + uuid: data_session.ConverterData( + "oa", + converter_config.CapellaTypeConfig( + _type[0].lower() + _type[1:] + ), + model.by_uuid(uuid), + ) + } base_object.c2pcli.capella_model = model expected = data_models.CapellaWorkItem( @@ -403,17 +391,14 @@ def test_create_work_items_with_special_polarion_type( **attrs, ) - work_items = base_object.pw.create_work_items( - TEST_DIAGRAM_CACHE, model, {} - ) + work_items = base_object.pw.create_work_items() assert len(work_items) == 1 assert work_items[uuid] == expected @staticmethod def test_create_links_custom_resolver(base_object: BaseObjectContainer): - obj = base_object.pw.elements["FakeModelObject"][1] - obj_2 = data_models.CapellaWorkItem( + work_item_obj_2 = data_models.CapellaWorkItem( id="Obj-2", uuid_capella="uuid2", type="fakeModelObject", @@ -421,12 +406,14 @@ def test_create_links_custom_resolver(base_object: BaseObjectContainer): description=markupsafe.Markup(""), status="open", ) - base_object.pw.polarion_data_repo.update_work_items([obj_2]) - base_object.new_work_items["uuid2"] = obj_2 - base_object.c2pcli.synchronize_config_roles = { - "FakeModelObject": ["description_reference"] - } - description_reference = {"uuid2": ["uuid1"]} + 
base_object.pw.polarion_data_repo.update_work_items([work_item_obj_2]) + base_object.pw.converter_session["uuid2"].work_item = work_item_obj_2 + base_object.pw.converter_session["uuid2"].type_config.links = [ + "description_reference" + ] + base_object.pw.converter_session["uuid2"].description_references = [ + "uuid1" + ] expected = polarion_api.WorkItemLink( "Obj-2", "Obj-1", @@ -435,14 +422,12 @@ def test_create_links_custom_resolver(base_object: BaseObjectContainer): ) link_serializer = link_converter.LinkSerializer( base_object.pw.polarion_data_repo, - base_object.new_work_items, - description_reference, + base_object.pw.converter_session, base_object.pw.polarion_params.project_id, base_object.c2pcli.capella_model, ) links = link_serializer.create_links_for_work_item( - obj, - base_object.c2pcli.synchronize_config_roles, + "uuid2", ) assert links == [expected] @@ -453,17 +438,18 @@ def test_create_links_custom_exchanges_resolver( function_uuid = "ceffa011-7b66-4b3c-9885-8e075e312ffa" uuid = "1a414995-f4cd-488c-8152-486e459fb9de" - obj = base_object.c2pcli.capella_model.by_uuid(function_uuid) + funtion_obj = base_object.c2pcli.capella_model.by_uuid(function_uuid) + obj = base_object.c2pcli.capella_model.by_uuid(uuid) - obj_1 = data_models.CapellaWorkItem( + work_item_obj_1 = data_models.CapellaWorkItem( id="Obj-1", uuid_capella=function_uuid, - type=type(obj).__name__, + type=type(funtion_obj).__name__, description_type="text/html", description=markupsafe.Markup(""), status="open", ) - obj_2 = data_models.CapellaWorkItem( + work_item_obj_2 = data_models.CapellaWorkItem( id="Obj-2", uuid_capella=uuid, type="functionalExchange", @@ -472,9 +458,27 @@ def test_create_links_custom_exchanges_resolver( status="open", ) - base_object.pw.polarion_data_repo.update_work_items([obj_1, obj_2]) - base_object.new_work_items[function_uuid] = obj_1 - base_object.new_work_items[uuid] = obj_2 + base_object.pw.polarion_data_repo.update_work_items( + [work_item_obj_1, work_item_obj_2] + ) + base_object.pw.converter_session[ + function_uuid + ] = data_session.ConverterData( + "fa", + converter_config.CapellaTypeConfig( + type(funtion_obj).__name__, links=["input_exchanges"] + ), + funtion_obj, + work_item_obj_1, + ) + base_object.pw.converter_session[uuid] = data_session.ConverterData( + "fa", + converter_config.CapellaTypeConfig( + "functionalExchange", + ), + obj, + work_item_obj_2, + ) base_object.c2pcli.synchronize_config_roles = { "SystemFunction": ["input_exchanges"] @@ -487,22 +491,17 @@ def test_create_links_custom_exchanges_resolver( ) link_serializer = link_converter.LinkSerializer( base_object.pw.polarion_data_repo, - base_object.new_work_items, - {}, + base_object.pw.converter_session, base_object.pw.polarion_params.project_id, base_object.c2pcli.capella_model, ) - links = link_serializer.create_links_for_work_item( - obj, - base_object.c2pcli.synchronize_config_roles, - ) + links = link_serializer.create_links_for_work_item(function_uuid) assert links == [expected] @staticmethod def test_create_links_missing_attribute( base_object: BaseObjectContainer, caplog: pytest.LogCaptureFixture ): - obj = base_object.pw.elements["FakeModelObject"][0] expected = ( "Unable to create work item link 'attribute' for [Obj-1]. 
" "There is no 'attribute' attribute on " @@ -511,14 +510,12 @@ def test_create_links_missing_attribute( with caplog.at_level(logging.DEBUG): link_serializer = link_converter.LinkSerializer( base_object.pw.polarion_data_repo, - base_object.new_work_items, - {}, + base_object.pw.converter_session, base_object.pw.polarion_params.project_id, base_object.c2pcli.capella_model, ) links = link_serializer.create_links_for_work_item( - obj, - base_object.c2pcli.synchronize_config_roles, + "uuid1", ) assert not links assert caplog.messages[0] == expected @@ -536,7 +533,7 @@ def test_create_links_from_ElementList(base_object: BaseObjectContainer): FakeModelObject, ), ) - base_object.pw.elements["FakeModelObject"].append(obj) + fake_objects = {"uuid4": fake, "uuid5": fake1, "uuid6": obj} work_items = [ data_models.CapellaWorkItem( @@ -551,7 +548,14 @@ def test_create_links_from_ElementList(base_object: BaseObjectContainer): ] base_object.pw.polarion_data_repo.update_work_items(work_items) for work_item in work_items: - base_object.new_work_items[work_item.uuid_capella] = work_item + base_object.pw.converter_session[ + work_item.uuid_capella + ] = data_session.ConverterData( + "", + base_object.pw.converter_session["uuid1"].type_config, + fake_objects[work_item.uuid_capella], + work_item, + ) expected_link = polarion_api.WorkItemLink( "Obj-6", @@ -567,14 +571,12 @@ def test_create_links_from_ElementList(base_object: BaseObjectContainer): ) link_serializer = link_converter.LinkSerializer( base_object.pw.polarion_data_repo, - base_object.new_work_items, - {}, + base_object.pw.converter_session, base_object.pw.polarion_params.project_id, base_object.c2pcli.capella_model, ) links = link_serializer.create_links_for_work_item( - obj, - base_object.c2pcli.synchronize_config_roles, + "uuid6", ) # type: ignore[arg-type] assert expected_link in links @@ -584,7 +586,6 @@ def test_create_links_from_ElementList(base_object: BaseObjectContainer): def test_create_link_from_single_attribute( base_object: BaseObjectContainer, ): - obj = base_object.pw.elements["FakeModelObject"][1] work_item_2 = data_models.CapellaWorkItem( id="Obj-2", uuid_capella="uuid2", @@ -595,7 +596,7 @@ def test_create_link_from_single_attribute( ) base_object.pw.polarion_data_repo.update_work_items([work_item_2]) - base_object.new_work_items["uuid2"] = work_item_2 + base_object.pw.converter_session["uuid2"].work_item = work_item_2 expected = polarion_api.WorkItemLink( "Obj-2", @@ -605,14 +606,12 @@ def test_create_link_from_single_attribute( ) link_serializer = link_converter.LinkSerializer( base_object.pw.polarion_data_repo, - base_object.new_work_items, - {}, + base_object.pw.converter_session, base_object.pw.polarion_params.project_id, base_object.c2pcli.capella_model, ) links = link_serializer.create_links_for_work_item( - obj, - base_object.c2pcli.synchronize_config_roles, + "uuid2", ) assert links == [expected] @@ -639,26 +638,26 @@ def test_update_work_items( "get_all_work_items", polarion_api_get_all_work_items, ) + config = mock.Mock(converter_config.ConverterConfig) + config.polarion_types = set() + base_object.pw.config = config + base_object.pw.load_polarion_work_item_map() - work_items = { - "uuid1": data_models.CapellaWorkItem( - id="Obj-1", - uuid_capella="uuid1", - title="Fake 1", - type="type", - description_type="text/html", - description=markupsafe.Markup(""), - ) - } - base_object.c2pcli.capella_model = mock_model = mock.MagicMock() - base_object.pw.model = mock_model - mock_model.by_uuid.return_value = 
base_object.pw.elements[ - "FakeModelObject" - ][0] - base_object.pw.patch_work_items( - work_items, {}, base_object.c2pcli.synchronize_config_roles + base_object.pw.converter_session[ + "uuid1" + ].work_item = data_models.CapellaWorkItem( + id="Obj-1", + uuid_capella="uuid1", + title="Fake 1", + type="type", + description_type="text/html", + description=markupsafe.Markup(""), ) + + del base_object.pw.converter_session["uuid2"] + + base_object.pw.patch_work_items() assert base_object.pw.client is not None assert base_object.pw.client.get_all_work_item_links.call_count == 1 assert base_object.pw.client.delete_work_item_links.call_count == 0 @@ -689,24 +688,18 @@ def test_update_work_items_filters_work_items_with_same_checksum( ) ] ) - work_items = { - "uuid1": data_models.CapellaWorkItem( - id="Obj-1", - uuid_capella="uuid1", - status="open", - type="fakeModelObject", - ) - } - mock_model = mock.MagicMock() - mock_model.by_uuid.return_value = FakeModelObject( - "uuid1", name="Fake 1" + base_object.pw.converter_session[ + "uuid1" + ].work_item = data_models.CapellaWorkItem( + id="Obj-1", + uuid_capella="uuid1", + status="open", + type="fakeModelObject", ) - base_object.pw.model = mock_model + del base_object.pw.converter_session["uuid2"] - base_object.pw.patch_work_items( - work_items, {}, base_object.c2pcli.synchronize_config_roles - ) + base_object.pw.patch_work_items() assert base_object.pw.client is not None assert base_object.pw.client.update_work_item.call_count == 0 @@ -716,10 +709,8 @@ def test_update_links_with_no_elements(base_object: BaseObjectContainer): base_object.pw.polarion_data_repo = ( polarion_repo.PolarionDataRepository() ) - work_items: dict[str, data_models.CapellaWorkItem] = {} - base_object.pw.patch_work_items( - work_items, {}, base_object.c2pcli.synchronize_config_roles - ) + base_object.pw.converter_session = {} + base_object.pw.patch_work_items() assert base_object.pw.client.get_all_work_item_links.call_count == 0 @@ -740,36 +731,32 @@ def test_update_links(base_object: BaseObjectContainer): ) ] ) - work_items = { - "uuid1": data_models.CapellaWorkItem( - id="Obj-1", - uuid_capella="uuid1", - status="open", - type="fakeModelObject", - ), - "uuid2": data_models.CapellaWorkItem( - id="Obj-2", - uuid_capella="uuid2", - status="open", - type="fakeModelObject", - ), - } + base_object.pw.converter_session[ + "uuid1" + ].work_item = data_models.CapellaWorkItem( + id="Obj-1", + uuid_capella="uuid1", + status="open", + type="fakeModelObject", + ) + base_object.pw.converter_session[ + "uuid2" + ].work_item = data_models.CapellaWorkItem( + id="Obj-2", + uuid_capella="uuid2", + status="open", + type="fakeModelObject", + ) + assert base_object.pw.client is not None base_object.pw.client.get_all_work_item_links.side_effect = ( [link], [], ) - base_object.c2pcli.capella_model = mock_model = mock.MagicMock() - mock_model.by_uuid.side_effect = base_object.pw.elements[ - "FakeModelObject" - ] expected_new_link = polarion_api.WorkItemLink( "Obj-2", "Obj-1", "attribute", None, "project_id" ) - base_object.pw.model = mock_model - base_object.pw.patch_work_items( - work_items, {}, base_object.c2pcli.synchronize_config_roles - ) + base_object.pw.patch_work_items() assert base_object.pw.client is not None links = base_object.pw.client.get_all_work_item_links.call_args_list assert base_object.pw.client.get_all_work_item_links.call_count == 2 @@ -790,7 +777,15 @@ def test_patch_work_item_grouped_links( base_object: BaseObjectContainer, dummy_work_items: dict[str, 
data_models.CapellaWorkItem], ): - work_items = dummy_work_items + base_object.pw.converter_session = { + work_item.uuid_capella: data_session.ConverterData( + "", + converter_config.CapellaTypeConfig("fakeModelObject"), + FakeModelObject("uuid4", name="Fake 4"), + work_item, + ) + for work_item in dummy_work_items.values() + } base_object.pw.polarion_data_repo = ( polarion_repo.PolarionDataRepository( [ @@ -812,8 +807,8 @@ def test_patch_work_item_grouped_links( "create_links_for_work_item", mock_create_links, ) - mock_create_links.side_effect = lambda obj, *args: dummy_work_items[ - obj.uuid + mock_create_links.side_effect = lambda uuid, *args: dummy_work_items[ + uuid ].linked_work_items def mock_back_link(work_item, back_links): @@ -835,9 +830,7 @@ def mock_back_link(work_item, back_links): FakeModelObject(f"uuid{i}", name=f"Fake {i}") for i in range(3) ] base_object.pw.model = mock_model - base_object.pw.patch_work_items( - work_items, {}, base_object.c2pcli.synchronize_config_roles - ) + base_object.pw.patch_work_items() assert base_object.pw.client is not None update_work_item_calls = ( base_object.pw.client.update_work_item.call_args_list @@ -956,13 +949,16 @@ def test_diagram(model: capellambse.MelodyModel): serializer = element_converter.CapellaWorkItemSerializer( TEST_DIAGRAM_CACHE, - {}, model, polarion_repo.PolarionDataRepository(), - {}, + { + TEST_DIAG_UUID: data_session.ConverterData( + "", DIAGRAM_CONFIG, diag + ) + }, ) - serialized_diagram = serializer.serialize(diag) + serialized_diagram = serializer.serialize(TEST_DIAG_UUID) if serialized_diagram is not None: serialized_diagram.description = None @@ -985,9 +981,10 @@ def test__decode_diagram(): @staticmethod @pytest.mark.parametrize( - "uuid,expected", + "layer,uuid,expected", [ pytest.param( + "la", TEST_ELEMENT_UUID, { "type": "logicalComponent", @@ -1003,6 +1000,7 @@ def test__decode_diagram(): id="logicalComponent", ), pytest.param( + "oa", TEST_OCAP_UUID, { "type": "operationalCapability", @@ -1018,6 +1016,7 @@ def test__decode_diagram(): id="operationalCapability", ), pytest.param( + "oa", TEST_WE_UUID, { "type": "entity", @@ -1029,6 +1028,7 @@ def test__decode_diagram(): id="entity", ), pytest.param( + "la", TEST_ACTOR_UUID, { "type": "logicalActor", @@ -1043,6 +1043,7 @@ def test__decode_diagram(): id="logicalActor", ), pytest.param( + "pa", TEST_PHYS_COMP, { "type": "physicalComponent", @@ -1054,6 +1055,7 @@ def test__decode_diagram(): id="physicalComponent", ), pytest.param( + "pa", TEST_PHYS_NODE, { "type": "physicalComponentNode", @@ -1065,6 +1067,7 @@ def test__decode_diagram(): id="physicalComponentNode", ), pytest.param( + "oa", TEST_SCENARIO, { "type": "scenario", @@ -1080,6 +1083,7 @@ def test__decode_diagram(): id="scenario", ), pytest.param( + "la", TEST_CAP_REAL, { "type": "capabilityRealization", @@ -1095,6 +1099,7 @@ def test__decode_diagram(): id="capabilityRealization", ), pytest.param( + "oa", TEST_CONSTRAINT, { "type": "constraint", @@ -1111,14 +1116,23 @@ def test__decode_diagram(): ) def test_generic_work_item( model: capellambse.MelodyModel, + layer: str, uuid: str, expected: dict[str, typing.Any], ): obj = model.by_uuid(uuid) + with open(conftest.TEST_MODEL_ELEMENTS_CONFIG, "r") as f: + config = converter_config.ConverterConfig(f) + + c_type = type(obj).__name__ + actor = None if not hasattr(obj, "is_actor") else obj.is_actor + nature = None if not hasattr(obj, "nature") else obj.nature + + type_config = config.get_type_config(layer, c_type, actor, nature) + assert type_config is not None 
serializer = element_converter.CapellaWorkItemSerializer( pathlib.Path(""), - TEST_POL_TYPE_MAP, model, polarion_repo.PolarionDataRepository( [ @@ -1127,10 +1141,16 @@ def test_generic_work_item( ) ] ), - {}, + { + uuid: data_session.ConverterData( + layer, + type_config, + obj, + ) + }, ) - work_item = serializer.serialize(obj) + work_item = serializer.serialize(uuid) assert work_item is not None status = work_item.status work_item.status = None