diff --git a/.coveragerc b/.coveragerc
new file mode 100644
index 00000000..3b6b33dc
--- /dev/null
+++ b/.coveragerc
@@ -0,0 +1,6 @@
+# Copyright DB Netz AG and contributors
+# SPDX-License-Identifier: CC0-1.0
+
+[report]
+exclude_lines =
+    @abc.abstractmethod
diff --git a/.git_archival.txt b/.git_archival.txt
new file mode 100644
index 00000000..1c1d2e81
--- /dev/null
+++ b/.git_archival.txt
@@ -0,0 +1,7 @@
+Copyright DB Netz AG and contributors
+SPDX-License-Identifier: CC0-1.0
+
+node: $Format:%H$
+node-date: $Format:%cI$
+describe-name: $Format:%(describe:tags=true)$
+ref-names: $Format:%D$
diff --git a/.gitattributes b/.gitattributes
new file mode 100644
index 00000000..de62da18
--- /dev/null
+++ b/.gitattributes
@@ -0,0 +1,11 @@
+# Copyright DB Netz AG and contributors
+# SPDX-License-Identifier: CC0-1.0
+
+* text=auto
+
+*.{cmd,[cC][mM][dD]} text eol=crlf
+*.{bat,[bB][aA][tT]} text eol=crlf
+
+*.{sh,py} text eol=lf
+
+.git_archival.txt export-subst
diff --git a/.github/workflows/build-test-publish.yml b/.github/workflows/build-test-publish.yml
new file mode 100644
index 00000000..e3e4cfa0
--- /dev/null
+++ b/.github/workflows/build-test-publish.yml
@@ -0,0 +1,76 @@
+# Copyright DB Netz AG and contributors
+# SPDX-License-Identifier: CC0-1.0
+
+name: Build
+
+on:
+  push:
+    branches: ["*"]
+    tags: ["v*.*.*"]
+  pull_request: { branches: [master] }
+
+jobs:
+  test:
+    name: Test with Python ${{matrix.python_version}} on ${{matrix.os}}
+    runs-on: ${{matrix.os}}
+    strategy:
+      fail-fast: false
+      matrix:
+        os: [ubuntu-latest]
+        python_version:
+          - "3.10"
+          - "3.11"
+        include:
+          - os: windows-latest
+            python_version: "3.11"
+    steps:
+      - uses: actions/checkout@v3
+      - name: Set up Python ${{matrix.python_version}}
+        uses: actions/setup-python@v3
+        with:
+          python-version: ${{matrix.python_version}}
+      - uses: actions/cache@v3
+        with:
+          path: ~/.cache/pip
+          key: ${{runner.os}}-pip-${{hashFiles('pyproject.toml')}}
+          restore-keys: |
+            ${{runner.os}}-pip-
+            ${{runner.os}}-
+      - name: Upgrade Pip
+        run: |-
+          python -m pip install -U pip
+      - name: Install test dependencies
+        run: |-
+          python -m pip install '.[test]'
+      - name: Run unit tests
+        run: |-
+          python -m pytest --cov-report=term --cov=capella2polarion --rootdir=.
+ + publish: + name: Publish artifacts + runs-on: ubuntu-latest + needs: test + steps: + - uses: actions/checkout@v3 + - name: Setup Python + uses: actions/setup-python@v3 + with: + python-version: "3.11" + - name: Install dependencies + run: |- + python -m pip install -U pip + python -m pip install build twine + - name: Build packages + run: |- + python -m build + - name: Verify packages + run: |- + python -m twine check dist/* + - name: Upload artifacts + uses: actions/upload-artifact@v3 + with: + name: Artifacts + path: 'dist/*' + - name: Publish to PyPI (release only) + if: startsWith(github.ref, 'refs/tags/v') + run: python -m twine upload -u __token__ -p ${{ secrets.PYPI_TOKEN }} --non-interactive dist/* diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml new file mode 100644 index 00000000..da4a6d8b --- /dev/null +++ b/.github/workflows/docs.yml @@ -0,0 +1,39 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: CC0-1.0 + +name: Docs + +on: + push: + branches: ["master"] + +jobs: + sphinx: + runs-on: ubuntu-latest + permissions: + contents: write + steps: + - uses: actions/checkout@v3 + with: + fetch-depth: 0 + - uses: actions/setup-python@v3 + with: + python-version: "3.11" + - name: Upgrade pip + run: | + python -m pip install -U pip + - name: Install dependencies + run: | + python -m pip install '.[docs]' + - name: Auto-generate APIDOC sources + run: |- + sphinx-apidoc --output-dir docs/source/code --force . + - name: Create docs + run: | + make -C docs html + - name: Deploy + uses: peaceiris/actions-gh-pages@v3 + with: + force_orphan: true + github_token: ${{ secrets.GITHUB_TOKEN }} + publish_dir: ./docs/build/html diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml new file mode 100644 index 00000000..d2d518e7 --- /dev/null +++ b/.github/workflows/lint.yml @@ -0,0 +1,42 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: CC0-1.0 + +name: Lint + +on: + push: + branches: ["*"] + +jobs: + pre-commit: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + with: + python-version: "3.11" + - name: Upgrade pip + run: |- + python -m pip install -U pip + - name: Install pre-commit + run: |- + python -m pip install pre-commit types-docutils + - name: Run Pre-Commit + run: |- + pre-commit run --all-files + pylint: + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v3 + - uses: actions/setup-python@v3 + with: + python-version: "3.11" + - name: Upgrade pip + run: |- + python -m pip install -U pip + - name: Install pylint + run: |- + python -m pip install pylint + - name: Run pylint + run: |- + pylint -dfixme capella2polarion || exit $(($? & ~24)) diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..975287e9 --- /dev/null +++ b/.gitignore @@ -0,0 +1,158 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: CC0-1.0 + +# Byte-compiled / optimized / DLL files +__pycache__/ +*.py[cod] +*$py.class + +# C extensions +*.so + +# Distribution / packaging +.Python +build/ +develop-eggs/ +dist/ +downloads/ +eggs/ +.eggs/ +lib/ +lib64/ +parts/ +sdist/ +var/ +wheels/ +share/python-wheels/ +*.egg-info/ +.installed.cfg +*.egg +MANIFEST + +# PyInstaller +# Usually these files are written by a python script from a template +# before PyInstaller builds the exe, so as to inject date/other infos into it. 
+*.manifest +*.spec + +# Installer logs +pip-log.txt +pip-delete-this-directory.txt + +# Unit test / coverage reports +htmlcov/ +.tox/ +.nox/ +.coverage +.coverage.* +.cache +nosetests.xml +coverage.xml +*.cover +*.py,cover +.hypothesis/ +.pytest_cache/ +cover/ + +# Translations +*.mo +*.pot + +# Django stuff: +*.log +local_settings.py +db.sqlite3 +db.sqlite3-journal + +# Flask stuff: +instance/ +.webassets-cache + +# Scrapy stuff: +.scrapy + +# Sphinx documentation +docs/_build/ +docs/source/code/ +docs/source/_build + +# PyBuilder +.pybuilder/ +target/ + +# Jupyter Notebook +.ipynb_checkpoints + +# IPython +profile_default/ +ipython_config.py + +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +# .python-version + +# pipenv +# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control. +# However, in case of collaboration, if having platform-specific dependencies or dependencies +# having no cross-platform support, pipenv may install dependencies that don't work, or not +# install all needed dependencies. +#Pipfile.lock + +# poetry +# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control. +# This is especially recommended for binary packages to ensure reproducibility, and is more +# commonly ignored for libraries. +# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control +#poetry.lock + +# pdm +# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control. +#pdm.lock +# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it +# in version control. +# https://pdm.fming.dev/#use-with-ide +.pdm.toml + +# PEP 582; used by e.g. 
github.com/David-OConnor/pyflow and github.com/pdm-project/pdm +__pypackages__/ + +# Celery stuff +celerybeat-schedule +celerybeat.pid + +# SageMath parsed files +*.sage.py + +# Environments +.env +.venv +env/ +venv/ +ENV/ +env.bak/ +venv.bak/ + +# Spyder project settings +.spyderproject +.spyproject + +# Rope project settings +.ropeproject + +# mkdocs documentation +/site + +# mypy +.mypy_cache/ +.dmypy.json +dmypy.json + +# Pyre type checker +.pyre/ + +# pytype static type analyzer +.pytype/ + +# Cython debug symbols +cython_debug/ diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml new file mode 100644 index 00000000..5c8d02ae --- /dev/null +++ b/.pre-commit-config.yaml @@ -0,0 +1,106 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: CC0-1.0 + +default_install_hook_types: [commit-msg, pre-commit] +default_stages: [commit, merge-commit] +repos: + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v4.4.0 + hooks: + - id: check-added-large-files + - id: check-ast + - id: check-builtin-literals + - id: check-case-conflict + - id: check-executables-have-shebangs + - id: check-json + - id: check-merge-conflict + - id: check-shebang-scripts-are-executable + - id: check-symlinks + - id: check-toml + - id: check-vcs-permalinks + - id: check-xml + - id: check-yaml + - id: debug-statements + - id: destroyed-symlinks + - id: end-of-file-fixer + - id: fix-byte-order-marker + - id: trailing-whitespace + - repo: https://github.com/psf/black + rev: 23.3.0 + hooks: + - id: black + - repo: https://github.com/PyCQA/isort + rev: 5.12.0 + hooks: + - id: isort + - repo: https://github.com/PyCQA/docformatter + rev: v1.7.2 + hooks: + - id: docformatter + additional_dependencies: + - docformatter[tomli] + - repo: https://github.com/PyCQA/pydocstyle + rev: 6.3.0 + hooks: + - id: pydocstyle + exclude: "^tests/" + additional_dependencies: + - pydocstyle[toml] + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.3.0 + hooks: + - id: mypy + additional_dependencies: + - types-requests + - types-PyYAML + - repo: https://github.com/Lucas-C/pre-commit-hooks + rev: v1.5.1 + hooks: + - id: insert-license + name: Insert license headers (shell-style comments) + files: '(?:^|/)(?:.*\.(?:py|sh|toml|ya?ml)|Dockerfile|Makefile)$' + exclude: '(?:^|/)\..+|^docs/Makefile$' + args: + - --detect-license-in-X-top-lines=15 + - --license-filepath + - LICENSES/.license_header.txt + - --comment-style + - "#" + - id: insert-license + name: Insert license headers (XML-style comments) + files: '\.(?:html|md|xml)$' + exclude: '(?:^|/)\..+' + args: + - --detect-license-in-X-top-lines=15 + - --license-filepath + - LICENSES/.license_header.txt + - --comment-style + - "" + - id: insert-license + name: Insert license headers (C-style comments) + files: '\.(?:css|js|ts)$' + exclude: '(?:^|/)\..+' + args: + - --detect-license-in-X-top-lines=15 + - --license-filepath + - LICENSES/.license_header.txt + - --comment-style + - "/*| *| */" + - id: insert-license + name: Insert license headers (reST comments) + files: '\.rst$' + exclude: '(?:^|/)\..+' + args: + - --detect-license-in-X-top-lines=15 + - --license-filepath + - LICENSES/.license_header.txt + - --comment-style + - "..| |" + - repo: https://github.com/fsfe/reuse-tool + rev: v1.1.2 + hooks: + - id: reuse + - repo: https://github.com/qoomon/git-conventional-commits + rev: v2.6.4 + hooks: + - id: conventional-commits diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..65934e99 --- /dev/null +++ b/CONTRIBUTING.md 
@@ -0,0 +1,161 @@
+
+
+# Contributing
+
+Thanks for your interest in our project. Contributions are always welcome!
+
+We are committed to fostering a welcoming, respectful, and harassment-free
+environment. Be kind!
+
+If you have questions, ideas or want to report a bug, feel free to [open an
+issue]. Or go ahead and [open a pull request] to contribute code. In order to
+reduce the burden on our maintainers, please make sure that your code follows
+our style guidelines outlined below.
+
+
+[open an issue]:
+  https://github.com/DSD-DBS/capella2polarion/issues
+[open a pull request]:
+  https://github.com/DSD-DBS/capella2polarion/pulls
+
+## Developing
+
+We recommend that you
+[develop inside of a virtual environment](README.md#installation). After you
+have set it up, simply run the unit tests to verify that everything is set up
+correctly:
+
+```zsh
+pytest
+```
+
+We additionally recommend that you set up your editor / IDE as follows.
+
+- Indent with 4 spaces per level of indentation
+
+- Maximum line length of 79 (add a ruler / thin line / highlighting / ...)
+
+- _If you use Visual Studio Code_: Consider using a platform which supports
+  third-party language servers more easily, and continue with the next point.
+
+  Otherwise, set up the editor to run `black`, `pylint` and `mypy` when saving.
+  To enable automatic import sorting with `isort`, add the following to your
+  `settings.json`:
+
+  ```json
+  "[python]": {
+    "editor.codeActionsOnSave": {
+      "source.organizeImports": true
+    }
+  }
+  ```
+
+  Note that the Pylance language server is not recommended, as it occasionally
+  causes false-positive errors for perfectly valid code.
+
+- _If you do not use VSC_: Set up your editor to use the [python-lsp-server],
+  and make sure that the relevant plugins are installed. You can install
+  everything that's needed into the virtualenv with pip:
+
+  [python-lsp-server]: https://github.com/python-lsp/python-lsp-server
+
+  ```zsh
+  pip install "python-lsp-server[pylint]" python-lsp-black pyls-isort pylsp-mypy
+  ```
+
+  This will provide as-you-type linting as well as automatic formatting on
+  save. Language server clients are available for a wide range of editors, from
+  Vim/Emacs to PyCharm/IDEA.
+
+## Code style
+
+We base our code style on a modified version of the
+[Google style guide for Python code](https://google.github.io/styleguide/pyguide.html).
+The key differences are:
+
+- **Docstrings**: The [Numpy style guide] applies here.
+
+  [numpy style guide]:
+    https://numpydoc.readthedocs.io/en/latest/format.html#docstring-standard
+
+  When writing docstrings for functions, use the imperative style, as per
+  [PEP-257]. For example, write "Do X and Y" instead of "Does X and Y".
+
+  [pep-257]: https://peps.python.org/pep-0257/
+
+- **Overridden methods**: If the documentation did not change from the base
+  class (i.e. the base class' method's docstring still applies without
+  modification), do not add a short docstring à la "See base class". This lets
+  automated tools pick up the full base class docstring instead, and is
+  therefore more useful in IDEs etc.
+
+- **Linting**: Use [pylint] for static code analysis, and [mypy] for static
+  type checking.
+
+  [pylint]: https://github.com/PyCQA/pylint
+  [mypy]: https://github.com/python/mypy
+
+- **Formatting**: Use [black] as code auto-formatter. The maximum line length
+  is 79, as per [PEP-8]. This setting should be automatically picked up from
+  the `pyproject.toml` file.
The reason for the shorter line length is that it + avoids wrapping and overflows in side-by-side split views (e.g. diffs) if + there's also information displayed to the side of it (e.g. a tree view of the + modified files). + + [black]: https://github.com/psf/black + [pep-8]: https://www.python.org/dev/peps/pep-0008/ + + Be aware of the different line length of 72 for docstrings. We currently do + not have a satisfactory solution to automatically apply or enforce this. + + Note that, while you're encouraged to do so in general, it is not a hard + requirement to break up long strings into smaller parts. Additionally, never + break up strings that are presented to the user in e.g. log messages, as that + makes it significantly harder to grep for them. + + Use [isort] for automatic sorting of imports. Its settings should + automatically be picked up from the `pyproject.toml` file as well. + + [isort]: https://github.com/PyCQA/isort + +- **Typing**: We do not make an exception for `typing` imports. Instead of + writing `from typing import SomeName`, use `import typing as t` and access + typing related classes like `t.TypedDict`. + + + + Use the new syntax and classes for typing introduced with Python 3.10 and available using + `from __future__ import annotations` since Python 3.8. + + Be aware however that this only works in the context of annotations; the code + still needs to run on Python 3.9! This means that in some (rare) cases, you _must_ use the + old-style type hints. + + - Instead of `t.Tuple`, `t.List` etc. use the builtin classes `tuple`, `list` + etc. + - For classes that are not builtin (e.g. `Iterable`), + `import collections.abc as cabc` and then use them like `cabc.Iterable`. + - Use [PEP-604-style unions], e.g. `int | float` instead of + `t.Union[int, float]`. + - Use `... | None` (with `None` always as the last union member) instead of + `t.Optional[...]` and always explicitly annotate where `None` is possible. + + [pep-604-style unions]: https://www.python.org/dev/peps/pep-0604/ + +- **Python style rules**: For conflicting parts, the [Black code style] wins. + If you have set up black correctly, you don't need to worry about this though + :) + + [black code style]: + https://black.readthedocs.io/en/stable/the_black_code_style/current_style.html + +- When working with `dict`s, consider using `t.TypedDict` instead of a more + generic `dict[str, float|int|str]`-like annotation where possible, as the + latter is much less precise (often requiring additional `assert`s or + `isinstance` checks to pass) and can grow unwieldy very quickly. + +- Prefer `t.NamedTuple` over `collections.namedtuple`, because the former uses + a more convenient `class ...:` syntax and also supports type annotations. diff --git a/LICENSES/.license_header.txt b/LICENSES/.license_header.txt new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/LICENSES/.license_header.txt @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/LICENSES/Apache-2.0.txt b/LICENSES/Apache-2.0.txt new file mode 100644 index 00000000..d6456956 --- /dev/null +++ b/LICENSES/Apache-2.0.txt @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. 
+ + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. 
Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. 
This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. + + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. 
diff --git a/LICENSES/CC0-1.0.txt b/LICENSES/CC0-1.0.txt new file mode 100644 index 00000000..0e259d42 --- /dev/null +++ b/LICENSES/CC0-1.0.txt @@ -0,0 +1,121 @@ +Creative Commons Legal Code + +CC0 1.0 Universal + + CREATIVE COMMONS CORPORATION IS NOT A LAW FIRM AND DOES NOT PROVIDE + LEGAL SERVICES. DISTRIBUTION OF THIS DOCUMENT DOES NOT CREATE AN + ATTORNEY-CLIENT RELATIONSHIP. CREATIVE COMMONS PROVIDES THIS + INFORMATION ON AN "AS-IS" BASIS. CREATIVE COMMONS MAKES NO WARRANTIES + REGARDING THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS + PROVIDED HEREUNDER, AND DISCLAIMS LIABILITY FOR DAMAGES RESULTING FROM + THE USE OF THIS DOCUMENT OR THE INFORMATION OR WORKS PROVIDED + HEREUNDER. + +Statement of Purpose + +The laws of most jurisdictions throughout the world automatically confer +exclusive Copyright and Related Rights (defined below) upon the creator +and subsequent owner(s) (each and all, an "owner") of an original work of +authorship and/or a database (each, a "Work"). + +Certain owners wish to permanently relinquish those rights to a Work for +the purpose of contributing to a commons of creative, cultural and +scientific works ("Commons") that the public can reliably and without fear +of later claims of infringement build upon, modify, incorporate in other +works, reuse and redistribute as freely as possible in any form whatsoever +and for any purposes, including without limitation commercial purposes. +These owners may contribute to the Commons to promote the ideal of a free +culture and the further production of creative, cultural and scientific +works, or to gain reputation or greater distribution for their Work in +part through the use and efforts of others. + +For these and/or other purposes and motivations, and without any +expectation of additional consideration or compensation, the person +associating CC0 with a Work (the "Affirmer"), to the extent that he or she +is an owner of Copyright and Related Rights in the Work, voluntarily +elects to apply CC0 to the Work and publicly distribute the Work under its +terms, with knowledge of his or her Copyright and Related Rights in the +Work and the meaning and intended legal effect of CC0 on those rights. + +1. Copyright and Related Rights. A Work made available under CC0 may be +protected by copyright and related or neighboring rights ("Copyright and +Related Rights"). Copyright and Related Rights include, but are not +limited to, the following: + + i. the right to reproduce, adapt, distribute, perform, display, + communicate, and translate a Work; + ii. moral rights retained by the original author(s) and/or performer(s); +iii. publicity and privacy rights pertaining to a person's image or + likeness depicted in a Work; + iv. rights protecting against unfair competition in regards to a Work, + subject to the limitations in paragraph 4(a), below; + v. rights protecting the extraction, dissemination, use and reuse of data + in a Work; + vi. database rights (such as those arising under Directive 96/9/EC of the + European Parliament and of the Council of 11 March 1996 on the legal + protection of databases, and under any national implementation + thereof, including any amended or successor version of such + directive); and +vii. other similar, equivalent or corresponding rights throughout the + world based on applicable law or treaty, and any national + implementations thereof. + +2. Waiver. 
To the greatest extent permitted by, but not in contravention +of, applicable law, Affirmer hereby overtly, fully, permanently, +irrevocably and unconditionally waives, abandons, and surrenders all of +Affirmer's Copyright and Related Rights and associated claims and causes +of action, whether now known or unknown (including existing as well as +future claims and causes of action), in the Work (i) in all territories +worldwide, (ii) for the maximum duration provided by applicable law or +treaty (including future time extensions), (iii) in any current or future +medium and for any number of copies, and (iv) for any purpose whatsoever, +including without limitation commercial, advertising or promotional +purposes (the "Waiver"). Affirmer makes the Waiver for the benefit of each +member of the public at large and to the detriment of Affirmer's heirs and +successors, fully intending that such Waiver shall not be subject to +revocation, rescission, cancellation, termination, or any other legal or +equitable action to disrupt the quiet enjoyment of the Work by the public +as contemplated by Affirmer's express Statement of Purpose. + +3. Public License Fallback. Should any part of the Waiver for any reason +be judged legally invalid or ineffective under applicable law, then the +Waiver shall be preserved to the maximum extent permitted taking into +account Affirmer's express Statement of Purpose. In addition, to the +extent the Waiver is so judged Affirmer hereby grants to each affected +person a royalty-free, non transferable, non sublicensable, non exclusive, +irrevocable and unconditional license to exercise Affirmer's Copyright and +Related Rights in the Work (i) in all territories worldwide, (ii) for the +maximum duration provided by applicable law or treaty (including future +time extensions), (iii) in any current or future medium and for any number +of copies, and (iv) for any purpose whatsoever, including without +limitation commercial, advertising or promotional purposes (the +"License"). The License shall be deemed effective as of the date CC0 was +applied by Affirmer to the Work. Should any part of the License for any +reason be judged legally invalid or ineffective under applicable law, such +partial invalidity or ineffectiveness shall not invalidate the remainder +of the License, and in such case Affirmer hereby affirms that he or she +will not (i) exercise any of his or her remaining Copyright and Related +Rights in the Work or (ii) assert any associated claims and causes of +action with respect to the Work, in either case contrary to Affirmer's +express Statement of Purpose. + +4. Limitations and Disclaimers. + + a. No trademark or patent rights held by Affirmer are waived, abandoned, + surrendered, licensed or otherwise affected by this document. + b. Affirmer offers the Work as-is and makes no representations or + warranties of any kind concerning the Work, express, implied, + statutory or otherwise, including without limitation warranties of + title, merchantability, fitness for a particular purpose, non + infringement, or the absence of latent or other defects, accuracy, or + the present or absence of errors, whether or not discoverable, all to + the greatest extent permissible under applicable law. + c. Affirmer disclaims responsibility for clearing rights of other persons + that may apply to the Work or any use thereof, including without + limitation any person's Copyright and Related Rights in the Work. 
+    Further, Affirmer disclaims responsibility for obtaining any necessary
+    consents, permissions or other rights required for any use of the
+    Work.
+ d. Affirmer understands and acknowledges that Creative Commons is not a
+    party to this document and has no duty or obligation with respect to
+    this CC0 or use of the Work.
diff --git a/README.md b/README.md
new file mode 100644
index 00000000..bd68773f
--- /dev/null
+++ b/README.md
@@ -0,0 +1,56 @@
+
+
+# capella2polarion
+
+
+![image](https://github.com/DSD-DBS/capella2polarion/actions/workflows/build-test-publish.yml/badge.svg)
+
+Synchronise Capella models with Polarion projects
+
+# Documentation
+
+
+Read the [full documentation on GitHub Pages](https://dsd-dbs.github.io/capella2polarion).
+
+# Installation
+
+You can install the latest released version directly from PyPI (**Not yet**).
+
+```zsh
+pip install capella2polarion
+```
+
+To set up a development environment, clone the project and install it into a
+virtual environment.
+
+```zsh
+git clone https://github.com/DSD-DBS/capella2polarion.git
+cd capella2polarion
+python -m venv .venv
+
+source .venv/bin/activate # for Linux / Mac
+.venv\Scripts\activate # for Windows
+
+pip install -U pip pre-commit
+pip install -e '.[docs,test]'
+pre-commit install
+```
+
+# Contributing
+
+We'd love to see your bug reports and improvement suggestions! Please take a
+look at our [guidelines for contributors](CONTRIBUTING.md) for details.
+
+# Licenses
+
+This project is compliant with the
+[REUSE Specification Version 3.0](https://git.fsfe.org/reuse/docs/src/commit/d173a27231a36e1a2a3af07421f5e557ae0fec46/spec.md).
+
+Copyright DB Netz AG, licensed under Apache 2.0 (see full text in
+[LICENSES/Apache-2.0.txt](LICENSES/Apache-2.0.txt))
+
+Dot-files are licensed under CC0-1.0 (see full text in
+[LICENSES/CC0-1.0.txt](LICENSES/CC0-1.0.txt))
diff --git a/capella2polarion/__init__.py b/capella2polarion/__init__.py
new file mode 100644
index 00000000..39a993ea
--- /dev/null
+++ b/capella2polarion/__init__.py
@@ -0,0 +1,10 @@
+# Copyright DB Netz AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""The capella2polarion package."""
+from importlib import metadata
+
+try:
+    __version__ = metadata.version("capella2polarion")
+except metadata.PackageNotFoundError:
+    __version__ = "0.0.0+unknown"
+del metadata
diff --git a/capella2polarion/__main__.py b/capella2polarion/__main__.py
new file mode 100644
index 00000000..7357e331
--- /dev/null
+++ b/capella2polarion/__main__.py
@@ -0,0 +1,226 @@
+# Copyright DB Netz AG and contributors
+# SPDX-License-Identifier: Apache-2.0
+"""Main entry point into capella2polarion."""
+
+import json
+import logging
+import os
+import pathlib
+import sys
+import typing as t
+from itertools import chain
+
+import capellambse
+import click
+import yaml
+from capellambse import cli_helpers
+
+from capella2polarion import elements, polarion_api
+
+logger = logging.getLogger(__name__)
+
+
+def _read_and_check_environment_vars(ctx: click.core.Context) -> None:
+    if pathlib.Path(".env").is_file():
+        try:
+            import dotenv
+
+            dotenv.load_dotenv(".env")
+        except ImportError:
+            logger.warning(
+                "Install the optional 'dev' project dependencies if you want "
+                "to load environment variables from the '.env' file!"
+            )
+
+    ctx.obj["POLARION_HOST"] = os.getenv("POLARION_HOST", "")
+    if not ctx.obj["POLARION_HOST"]:
+        logger.error(
+            "Cannot read the host URL for the Polarion server! "
+            "Tried to read the environment variable 'POLARION_HOST'."
+ ) + sys.exit(1) + + if not os.getenv("POLARION_PAT", ""): + logger.error( + "Cannot read the Personal Access Token (PAT) for the Polarion " + "server! Tried to read the environment variable 'POLARION_PAT'." + ) + sys.exit(1) + + +def _set_up_logger(debug: bool) -> None: + level = logging.DEBUG if debug else logging.WARNING + assert isinstance(logger.parent, logging.RootLogger) + logger.parent.setLevel(level) + formatter = logging.Formatter( + "%(asctime)-15s - %(levelname)-8s %(message)s" + ) + console_handler = logging.StreamHandler() + console_handler.setLevel(level) + console_handler.setFormatter(formatter) + console_handler.addFilter( + lambda record: record.name.startswith("capella2polarion") + or (record.name == "httpx" and record.levelname == "INFO") + ) + logger.parent.addHandler(console_handler) + + +def _get_roles_from_config(ctx: dict[str, t.Any]) -> dict[str, list[str]]: + if special_config := ctx["CONFIG"].pop("*", []): + ctx["CONFIG"] = _sanitize_config(ctx["CONFIG"], special_config) + + roles: dict[str, list[str]] = {} + for typ in chain.from_iterable(ctx["CONFIG"].values()): + if isinstance(typ, dict): + for key, role_ids in typ.items(): + roles[key] = list(role_ids) + else: + roles[typ] = [] + return roles + + +def _sanitize_config( + config: dict[str, list[str | dict[str, t.Any]]], special: dict[str, t.Any] +) -> dict[str, t.Any]: + new_config: dict[str, t.Any] = {} + for layer, xtypes in config.items(): + new_entries: list[str | dict[str, t.Any]] = [] + for xtype in xtypes: + if isinstance(xtype, dict): + for sub_key, sub_value in xtype.items(): + new_value = ( + special.get("*", []) + + special.get(sub_key, []) + + sub_value + ) + new_entries.append({sub_key: new_value}) + else: + if new_value := special.get("*", []) + special.get(xtype, []): + new_entries.append({xtype: new_value}) + else: + new_entries.append(xtype) + new_config[layer] = new_entries + + return new_config + + +def get_polarion_id_map( + ctx: dict[str, t.Any], type_: str = "" +) -> dict[str, str]: + """Map workitem IDs to Capella UUID or empty string if not set.""" + types_ = map(elements.helpers.resolve_element_type, ctx.get("TYPES", [])) + types = [type_] if type_ else list(types_) + return ctx["API"].get_work_item_element_mapping(types) + + +@click.group() +@click.option("--debug/--no-debug", is_flag=True, default=False) +@click.option("--project-id", required=True, type=str) +@click.option("--delete", is_flag=True, default=False) +@click.pass_context +def cli( + ctx: click.core.Context, debug: bool, project_id: str, delete: bool = False +) -> None: + """Synchronise data from Capella to Polarion. 
+ + PROJECT_ID is a Polarion project id + """ + ctx.ensure_object(dict) + _read_and_check_environment_vars(ctx) + _set_up_logger(debug) + ctx.obj["PROJECT_ID"] = project_id + ctx.obj["API"] = polarion_api.OpenAPIPolarionProjectClient( + project_id, + capella_uuid_attribute=elements.UUID_ATTR_NAME, + delete_polarion_work_items=delete, + polarion_api_endpoint=f"{ctx.obj['POLARION_HOST']}/rest/v1", + polarion_access_token=os.environ["POLARION_PAT"], + ) + if not ctx.obj["API"].project_exists(): + sys.exit(1) + + +@cli.command() +@click.argument( + "diagram_cache", + type=click.Path( + exists=True, + file_okay=False, + readable=True, + resolve_path=True, + path_type=pathlib.Path, + ), +) +@click.pass_context +def diagrams(ctx: click.core.Context, diagram_cache: pathlib.Path) -> None: + """Synchronise diagrams.""" + logger.debug( + "Synchronising diagrams from diagram cache at '%s' " + "to Polarion project with id %r...", + diagram_cache, + ctx.obj["PROJECT_ID"], + ) + idx_file = diagram_cache / "index.json" + if not idx_file.is_file(): + logger.error("Cannot find diagram cache index file '%s'!", idx_file) + sys.exit(1) + + ctx.obj["DIAGRAM_CACHE"] = diagram_cache + ctx.obj["DIAGRAM_IDX"] = json.loads(idx_file.read_text(encoding="utf8")) + ctx.obj["CAPELLA_UUIDS"] = [ + d["uuid"] for d in ctx.obj["DIAGRAM_IDX"] if d["success"] + ] + ctx.obj["POLARION_ID_MAP"] = get_polarion_id_map(ctx.obj, "diagram") + + elements.delete_work_items(ctx.obj) + elements.diagram.update_diagrams(ctx.obj) + elements.diagram.create_diagrams(ctx.obj) + + +@cli.command() +@click.argument("model", type=cli_helpers.ModelCLI()) +@click.argument("config_file", type=click.File(mode="r", encoding="utf8")) +@click.pass_context +def model_elements( + ctx: click.core.Context, + model: capellambse.MelodyModel, + config_file: t.TextIO, +) -> None: + """Synchronise model elements.""" + ctx.obj["MODEL"] = model + ctx.obj["CONFIG"] = yaml.safe_load(config_file) + ctx.obj["ROLES"] = _get_roles_from_config(ctx.obj) + ( + ctx.obj["ELEMENTS"], + ctx.obj["POLARION_TYPE_MAP"], + ) = elements.get_elements_and_type_map(ctx.obj) + ctx.obj["CAPELLA_UUIDS"] = set(ctx.obj["POLARION_TYPE_MAP"]) + ctx.obj["TYPES"] = elements.get_types(ctx.obj) + ctx.obj["POLARION_ID_MAP"] = get_polarion_id_map(ctx.obj) + + elements.delete_work_items(ctx.obj) + elements.element.update_work_items(ctx.obj) + elements.element.create_work_items(ctx.obj) + + ctx.obj["POLARION_ID_MAP"] = get_polarion_id_map(ctx.obj) + elements.element.update_links(ctx.obj) + + ctx.obj["POLARION_ID_MAP"] |= get_polarion_id_map(ctx.obj, "diagram") + _diagrams = [ + diagram + for diagram in model.diagrams + if diagram.uuid in ctx.obj["POLARION_ID_MAP"] + ] + ctx.obj["ROLES"]["Diagram"] = ["diagram_elements"] + elements.element.update_links(ctx.obj, _diagrams) + + elements_index_file = elements.make_model_elements_index(ctx.obj) + logger.debug( + "Synchronising model objects (%r) to Polarion project with id %r...", + str(elements_index_file), + ctx.obj["PROJECT_ID"], + ) + + +if __name__ == "__main__": + cli(obj={}) diff --git a/capella2polarion/elements/__init__.py b/capella2polarion/elements/__init__.py new file mode 100644 index 00000000..0830cea0 --- /dev/null +++ b/capella2polarion/elements/__init__.py @@ -0,0 +1,177 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Objects for synchronization of capella objects to polarion workitems.""" + +__all__ = [ + "delete_work_items", + "get_types", + "get_elements_and_type_map", + "make_model_elements_index", 
+ "STATUS_DELETE", + "UUID_ATTR_NAME", +] + +import logging +import pathlib +import typing as t +from itertools import chain + +import yaml +from capellambse.model import common + +from capella2polarion import polarion_api + +logger = logging.getLogger(__name__) + +UUID_ATTR_NAME = "uuid_capella" +STATUS_DELETE = "deleted" +ELEMENTS_IDX_PATH = pathlib.Path("elements_index.yaml") +ACTOR_TYPES = { + "LogicalActor": "LogicalComponent", + "SystemActor": "SystemComponent", + "PhysicalActor": "PhysicalComponent", +} +PHYSICAL_COMPONENT_TYPES = { + "PhysicalComponentNode": "PhysicalComponent", + "PhysicalComponentBehavior": "PhysicalComponent", +} +POL2CAPELLA_TYPES = ( + { + "OperationalEntity": "Entity", + "OperationalInteraction": "FunctionalExchange", + } + | ACTOR_TYPES + | PHYSICAL_COMPONENT_TYPES +) + + +def delete_work_items(ctx: dict[str, t.Any]) -> None: + """Delete work items in a Polarion project. + + If the delete flag is set to ``False`` in the context work items are + marked as ``to be deleted`` via the status attribute. + + Parameters + ---------- + ctx + The context for the workitem operation to be processed. + """ + + def serialize_for_delete(uuid: str) -> str: + logger.debug( + "Delete work item %r...", + workitem_id := ctx["POLARION_ID_MAP"][uuid], + ) + return workitem_id + + uuids: set[str] = set(ctx["POLARION_ID_MAP"]) - set(ctx["CAPELLA_UUIDS"]) + work_items = [serialize_for_delete(uuid) for uuid in uuids] + if work_items: + try: + ctx["API"].delete_work_items(work_items) + except polarion_api.PolarionApiException as error: + logger.error("Deleting work items failed. %s", error.args[0]) + + +def get_types(ctx: dict[str, t.Any]) -> set[str]: + """Return a set of Polarion types from the current context.""" + xtypes = set[str]() + for obj in chain.from_iterable(ctx["ELEMENTS"].values()): + xtype = ctx["POLARION_TYPE_MAP"].get(obj.uuid, type(obj).__name__) + xtypes.add(helpers.resolve_element_type(xtype)) + return xtypes + + +def get_elements_and_type_map( + ctx: dict[str, t.Any] +) -> tuple[dict[str, list[common.GenericElement]], dict[str, str]]: + """Return an elements and UUID to Polarion type map.""" + convert_type = POL2CAPELLA_TYPES + type_map: dict[str, str] = {} + elements: dict[str, list[common.GenericElement]] = {} + for _below, pol_types in ctx["CONFIG"].items(): + below = getattr(ctx["MODEL"], _below) + for typ in pol_types: + if isinstance(typ, dict): + typ = list(typ.keys())[0] + + xtype = convert_type.get(typ, typ) + objects = ctx["MODEL"].search(xtype, below=below) + elements.setdefault(typ, []).extend(objects) + for obj in objects: + type_map[obj.uuid] = typ + + _fix_components(elements, type_map) + return elements, type_map + + +def _fix_components( + elements: dict[str, list[common.GenericElement]], type_map: dict[str, str] +) -> None: + for typ, xtype in ACTOR_TYPES.items(): + if typ not in elements: + continue + + actors: list[common.GenericElement] = [] + components: list[common.GenericElement] = [] + for obj in elements[typ]: + if obj.is_actor: + actors.append(obj) + else: + components.append(obj) + type_map[obj.uuid] = xtype + + elements[typ] = actors + elements[xtype] = components + + nodes: list[common.GenericElement] = [] + behaviors: list[common.GenericElement] = [] + components = [] + for obj in elements.get("PhysicalComponent", []): + if obj.nature is not None and obj.nature.name == "NODE": + nodes.append(obj) + type_map[obj.uuid] = "PhysicalComponentNode" + elif obj.nature is not None and obj.nature.name == "BEHAVIOR": + behaviors.append(obj) + 
type_map[obj.uuid] = "PhysicalComponentBehavior" + else: + components.append(obj) + + if nodes: + elements["PhysicalComponentNode"] = nodes + if behaviors: + elements["PhysicalComponentBehavior"] = behaviors + if components: + elements["PhysicalComponent"] = components + + +def make_model_elements_index(ctx: dict[str, t.Any]) -> pathlib.Path: + """Create an elements index file for all migrated elements.""" + elements: list[dict[str, t.Any]] = [] + for obj in chain.from_iterable(ctx["ELEMENTS"].values()): + element_ = {"uuid": obj.uuid, "name": obj.name} + if pid := ctx["POLARION_ID_MAP"].get(obj.uuid): + element_["id"] = pid + + for role_id in ctx["ROLES"].get(type(obj).__name__, []): + attribute = getattr(obj, role_id, None) + if attribute is None: + continue + elif isinstance(attribute, common.ElementList): + refs = [ + ctx["POLARION_ID_MAP"].get(a.uuid, a.uuid) + for a in attribute + ] + if refs: + element_[role_id] = refs + else: + element_[role_id] = ctx["POLARION_ID_MAP"].get( + attribute.uuid, attribute.uuid + ) + elements.append(element_) + + ELEMENTS_IDX_PATH.write_text(yaml.dump(elements), encoding="utf8") + return ELEMENTS_IDX_PATH + + +from . import diagram, element, helpers, serialize diff --git a/capella2polarion/elements/diagram.py b/capella2polarion/elements/diagram.py new file mode 100644 index 00000000..7c4ad21e --- /dev/null +++ b/capella2polarion/elements/diagram.py @@ -0,0 +1,62 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Objects for synchronization of Capella diagrams to polarion.""" + +import logging +import typing as t + +from capella2polarion import polarion_api +from capella2polarion.elements import serialize + +logger = logging.getLogger(__name__) + + +def create_diagrams(ctx: dict[str, t.Any]) -> None: + """Create a set of work items of type ``diagram`` in Polarion.""" + + def serialize_for_create( + diagram: dict[str, t.Any] + ) -> polarion_api.WorkItem | None: + attributes = serialize.element(diagram, ctx, serialize.diagram) + if attributes is None: + return None + return polarion_api.WorkItem(**attributes) + + uuids = set(ctx["CAPELLA_UUIDS"]) - set(ctx["POLARION_ID_MAP"]) + diagrams = [diag for diag in ctx["DIAGRAM_IDX"] if diag["uuid"] in uuids] + work_items = [serialize_for_create(diagram) for diagram in diagrams] + work_items = list(filter(None.__ne__, work_items)) + for work_item in work_items: + assert work_item is not None + logger.debug("Create work item for diagram %r...", work_item.title) + if work_items: + try: + ctx["API"].create_work_items(work_items) + except polarion_api.PolarionApiException as error: + logger.error("Creating diagrams failed. 
%s", error.args[0]) + + +def update_diagrams(ctx: dict[str, t.Any]) -> None: + """Update a set of work items of type ``diagram`` in Polarion.""" + uuids: set[str] = set(ctx["POLARION_ID_MAP"]) & set(ctx["CAPELLA_UUIDS"]) + diagrams = {d["uuid"]: d for d in ctx["DIAGRAM_IDX"] if d["uuid"] in uuids} + for uuid in uuids: + wid = ctx["POLARION_ID_MAP"][uuid] + diagram = diagrams[uuid] + logger.debug( + "Update work item %r for diagram %r...", wid, diagram["name"] + ) + attributes = serialize.element(diagram, ctx, serialize.diagram) + if attributes is None: + continue + + del attributes["type"] + del attributes["uuid_capella"] + attributes["status"] = "open" + try: + ctx["API"].update_work_item( + polarion_api.WorkItem(wid, **attributes) + ) + except polarion_api.PolarionApiException as error: + diag = f"{wid}({diagram['name']})" + logger.error("Updating diagram %r failed. %s", diag, error.args[0]) diff --git a/capella2polarion/elements/element.py b/capella2polarion/elements/element.py new file mode 100644 index 00000000..9f4b173f --- /dev/null +++ b/capella2polarion/elements/element.py @@ -0,0 +1,239 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Objects for synchronization of Capella model objects to Polarion.""" +from __future__ import annotations + +import collections.abc as cabc +import logging +import typing as t +from itertools import chain + +from capellambse.model import common + +from capella2polarion import polarion_api +from capella2polarion.elements import * +from capella2polarion.elements import serialize + +logger = logging.getLogger(__name__) + +TYPE_RESOLVERS = {"Part": lambda obj: obj.type.uuid} + + +def create_work_items(ctx: dict[str, t.Any]) -> None: + """Create a set of work items in Polarion.""" + + def serialize_for_create( + obj: common.GenericElement, + ) -> polarion_api.WorkItem | None: + logger.debug( + "Create work item for model element %r...", obj._short_repr_() + ) + attributes = serialize.element(obj, ctx, serialize.generic_attributes) + if attributes is None: + return None + return polarion_api.WorkItem(**attributes) + + objects = chain.from_iterable(ctx["ELEMENTS"].values()) + work_items = [ + serialize_for_create(obj) + for obj in objects + if obj.uuid not in ctx["POLARION_ID_MAP"] + ] + work_items = list(filter(None.__ne__, work_items)) + if work_items: + try: + ctx["API"].create_work_items(work_items) + except polarion_api.PolarionApiException as error: + logger.error("Creating work items failed. %s", error.args[0]) + + +def update_work_items(ctx: dict[str, t.Any]) -> None: + """Update a set of work items in Polarion.""" + for obj in chain.from_iterable(ctx["ELEMENTS"].values()): + if obj.uuid not in ctx["POLARION_ID_MAP"]: + continue + + logger.debug( + "Update work item %r for model element %r...", + wid := ctx["POLARION_ID_MAP"][obj.uuid], + obj._short_repr_(), + ) + attributes = serialize.element(obj, ctx, serialize.generic_attributes) + if attributes is None: + continue + + del attributes["type"] + del attributes["uuid_capella"] + attributes["status"] = "open" + try: + ctx["API"].update_work_item( + polarion_api.WorkItem(id=wid, **attributes) + ) + except polarion_api.PolarionApiException as error: + wi = f"{wid}({obj._short_repr_()})" + logger.error("Updating work item %r failed. 
%s", wi, error.args[0]) + + +class LinkBuilder(t.NamedTuple): + """Helper class for creating workitem links.""" + + context: dict[str, t.Any] + obj: common.GenericElement + + def create( + self, secondary_id: str, role_id: str + ) -> polarion_api.WorkItemLink | None: + """Post a work item link create request.""" + primary_id = self.context["POLARION_ID_MAP"][self.obj.uuid] + logger.debug( + "Create work item link %r from %r to %r for model element %r", + role_id, + primary_id, + secondary_id, + self.obj._short_repr_(), + ) + return polarion_api.WorkItemLink( + primary_id, + secondary_id, + role_id, + secondary_work_item_project=self.context["PROJECT_ID"], + ) + + +def update_links( + ctx: dict[str, t.Any], + elements: cabc.Iterable[common.GenericElement] | None = None, +) -> None: + """Create and update work item links in Polarion.""" + custom_link_resolvers = CUSTOM_LINKS + for elt in elements or chain.from_iterable(ctx["ELEMENTS"].values()): + if elt.uuid not in ctx["POLARION_ID_MAP"]: + continue + + workitem_id = ctx["POLARION_ID_MAP"][elt.uuid] + logger.debug( + "Fetching links for work item %r(%r)...", + workitem_id, + elt._short_repr_(), + ) + links: list[polarion_api.WorkItemLink] + try: + links = ctx["API"].get_all_work_item_links(workitem_id) + except polarion_api.PolarionApiException as error: + logger.error( + "Fetching links for work item %r(%r). failed %s", + workitem_id, + elt._short_repr_(), + error.args[0], + ) + continue + + link_builder = LinkBuilder(ctx, elt) + for role_id in ctx["ROLES"].get(type(elt).__name__, []): + id_link_map: dict[str, polarion_api.WorkItemLink] = {} + for link in links: + if role_id != link.role: + continue + + id_link_map[link.secondary_work_item_id] = link + + if resolver := custom_link_resolvers.get(role_id): + resolver(link_builder, role_id, id_link_map) + continue + + if (refs := getattr(elt, role_id, None)) is None: + continue + + if isinstance(refs, common.ElementList): + new = refs.by_uuid + else: + assert hasattr(refs, "uuid") + new = [refs.uuid] + + new = set(_get_work_item_ids(ctx, new, role_id)) + _handle_create_and_delete( + link_builder, role_id, new, id_link_map, id_link_map + ) + + +def _get_work_item_ids( + ctx: dict[str, t.Any], uuids: cabc.Iterable[str], role_id: str +) -> cabc.Iterator[str]: + for uuid in uuids: + if wid := ctx["POLARION_ID_MAP"].get(uuid): + yield wid + else: + obj = ctx["MODEL"].by_uuid(uuid) + logger.debug( + "Unable to create work item link %r. 
" + "Couldn't identify work item for %r", + role_id, + obj._short_repr_(), + ) + + +def _handle_description_reference_links( + link_builder: LinkBuilder, + role_id: str, + links: dict[str, polarion_api.WorkItemLink], +) -> None: + refs = link_builder.context["DESCR_REFERENCES"].get(link_builder.obj.uuid) + refs = set(_get_work_item_ids(link_builder.context, refs, role_id)) + _handle_create_and_delete(link_builder, role_id, refs, links, links) + + +def _handle_diagram_reference_links( + link_builder: LinkBuilder, + role_id: str, + links: dict[str, polarion_api.WorkItemLink], +) -> None: + try: + refs = set(_collect_uuids(link_builder.obj.nodes)) + refs = set(_get_work_item_ids(link_builder.context, refs, role_id)) + _handle_create_and_delete(link_builder, role_id, refs, links, links) + except StopIteration: + logger.exception( + "Could not create links for diagram %r", + link_builder.obj._short_repr_(), + ) + + +def _collect_uuids(nodes: list[common.GenericElement]) -> cabc.Iterator[str]: + type_resolvers = TYPE_RESOLVERS + for node in nodes: + uuid = node.uuid + if resolver := type_resolvers.get(type(node).__name__): + uuid = resolver(node) + + yield uuid + + +def _handle_create_and_delete( + link_builder: LinkBuilder, + role_id: str, + new: cabc.Iterable[str], + old: cabc.Iterable[str], + links: dict[str, t.Any], +) -> None: + create = set(new) - set(old) + new_links = [link_builder.create(id, role_id) for id in create] + new_links = list(filter(None.__ne__, new_links)) + if new_links: + link_builder.context["API"].create_work_item_links(new_links) + + delete = set(old) - set(new) + dead_links = [links.get(id) for id in delete] + dead_links = list(filter(None.__ne__, dead_links)) + for link in dead_links: + rep = link_builder.obj._short_repr_() + logger.debug( + "Delete work item link %r for model element %r", link, rep + ) + if dead_links: + link_builder.context["API"].delete_work_item_links(dead_links) + + +CUSTOM_LINKS = { + "description_reference": _handle_description_reference_links, + "diagram_elements": _handle_diagram_reference_links, +} diff --git a/capella2polarion/elements/helpers.py b/capella2polarion/elements/helpers.py new file mode 100644 index 00000000..2bff7d34 --- /dev/null +++ b/capella2polarion/elements/helpers.py @@ -0,0 +1,8 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Helper objects for synchronisation of capella objects to work items.""" + + +def resolve_element_type(type_: str) -> str: + """Return a valid Type ID for polarion for a given ``obj``.""" + return type_[0].lower() + type_[1:] diff --git a/capella2polarion/elements/serialize.py b/capella2polarion/elements/serialize.py new file mode 100644 index 00000000..a483fdb9 --- /dev/null +++ b/capella2polarion/elements/serialize.py @@ -0,0 +1,221 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Objects for serialization of capella objects to workitems.""" + +import base64 +import collections.abc as cabc +import logging +import mimetypes +import pathlib +import re +import typing as t + +import markupsafe +from capellambse import helpers as chelpers +from capellambse.model import common +from capellambse.model.crosslayer import cs +from capellambse.model.layers import oa, pa +from lxml import etree + +from capella2polarion.elements import helpers + +RE_DESCR_LINK_PATTERN = re.compile(r"[^<]+<\/a>") +RE_DESCR_DELETED_PATTERN = re.compile( + f"" +) +RE_CAMEL_CASE_2ND_WORD_PATTERN = re.compile(r"([a-z]+)([A-Z][a-z]+)") +DIAGRAM_STYLES = 
{"max-width": "100%"} + + +logger = logging.getLogger(__name__) + + +def element( + obj: dict[str, t.Any] | common.GenericElement, + ctx: dict[str, t.Any], + serializer: cabc.Callable[[t.Any, dict[str, t.Any]], dict[str, t.Any]], +) -> dict[str, t.Any] | None: + """Seralize a Capella element for the PolarionRestAPI.""" + try: + return serializer(obj, ctx) + except Exception as error: + logger.error("Serializing model element failed. %s", error.args[0]) + return None + + +def diagram(diag: dict[str, t.Any], ctx: dict[str, t.Any]) -> dict[str, t.Any]: + """Serialize a diagram for Polarion.""" + diagram_path = ctx["DIAGRAM_CACHE"] / f"{diag['uuid']}.svg" + src = _decode_diagram(diagram_path) + style = "; ".join( + (f"{key}={value}" for key, value in DIAGRAM_STYLES.items()) + ) + description = f'
<html><p><img style="{style}" src="{src}" /></p></html>
' + return { + "type": "diagram", + "title": diag["name"], + "uuid_capella": diag["uuid"], + "description_type": "text/html", + "description": description, + } + + +def _decode_diagram(diagram_path: pathlib.Path) -> str: + mime_type, _ = mimetypes.guess_type(diagram_path) + if mime_type is None: + logger.error( + "Do not understand the MIME subtype for the diagram '%s'!", + diagram_path, + ) + return "" + content = diagram_path.read_bytes() + content_encoded = base64.standard_b64encode(content) + assert mime_type is not None + image_data = b"data:" + mime_type.encode() + b";base64," + content_encoded + src = image_data.decode() + return src + + +def generic_attributes( + obj: common.GenericElement, ctx: dict[str, t.Any] +) -> dict[str, t.Any]: + """Return an attributes dictionary for the given model element.""" + xtype = ctx["POLARION_TYPE_MAP"].get(obj.uuid, type(obj).__name__) + serializer = SERIALIZERS.get(xtype, _generic_attributes) + return serializer(obj, ctx) + + +def _generic_attributes( + obj: common.GenericElement, ctx: dict[str, t.Any] +) -> dict[str, t.Any]: + xtype = ctx["POLARION_TYPE_MAP"].get(obj.uuid, type(obj).__name__) + raw_description = getattr(obj, "description", markupsafe.Markup("")) + uuids, value = _sanitize_description(raw_description, ctx) + ctx.setdefault("DESCR_REFERENCES", {})[obj.uuid] = uuids + return { + "type": helpers.resolve_element_type(xtype), + "title": obj.name, + "uuid_capella": obj.uuid, + "description_type": "text/html", + "description": value, + } + + +def _sanitize_description( + descr: markupsafe.Markup, ctx: dict[str, t.Any] +) -> tuple[list[str], markupsafe.Markup]: + referenced_uuids: list[str] = [] + replaced_markup = RE_DESCR_LINK_PATTERN.sub( + lambda match: replace_markup(match, ctx, referenced_uuids), descr + ) + + # XXX: Can be removed after fix in capellambse + def repair_images(node: etree._Element) -> None: + if node.tag != "img": + return + + file_url = pathlib.PurePosixPath(node.get("src")) + workspace = file_url.parts[0] + file_path = pathlib.PurePosixPath(*file_url.parts[1:]) + mime_type, _ = mimetypes.guess_type(file_url) + resources = ctx["MODEL"]._loader.resources + filehandler = resources[["\x00", workspace][workspace in resources]] + try: + with filehandler.open(file_path, "r") as img: + b64_img = base64.b64encode(img.read()).decode("utf8") + node.attrib["src"] = f"data:{mime_type};base64,{b64_img}" + except FileNotFoundError: + logger.error("Inline image can't be found from %r", file_path) + + repaired_markup = chelpers.process_html_fragments( + replaced_markup, repair_images + ) + return referenced_uuids, repaired_markup + + +def replace_markup( + match: re.Match, + ctx: dict[str, t.Any], + referenced_uuids: list[str], + non_matcher: cabc.Callable[[str], str] = lambda i: i, +) -> str: + """Replace UUID references in a ``match`` with a work item link. + + If the UUID doesn't correspond to an existing work item the original + text is returned. 
+ """ + uuid = match.group(1) + if pid := ctx["POLARION_ID_MAP"].get(uuid): + referenced_uuids.append(uuid) + return ( + '' + "" + ) + else: + return non_matcher(match.group(0)) + + +def operational_capability( + obj: oa.OperationalCapability, ctx: dict[str, t.Any] +) -> dict[str, t.Any]: + """Return attributes for an ``OperatioanlCapability``.""" + assert isinstance(obj, oa.OperationalCapability) + + def get_condition(cap: oa.OperationalCapability, name: str) -> str: + if not (condition := getattr(cap, name)): + return "" + return condition.specification["capella:linkedText"].striptags() + + def strike_through(string: str) -> str: + if match := RE_DESCR_DELETED_PATTERN.match(string): + string = match.group(1) + return f'{string}' + + def matcher(match: re.Match) -> str: + return strike_through(replace_markup(match, ctx, [])) + + attributes = _generic_attributes(obj, ctx) + pre_condition = RE_DESCR_DELETED_PATTERN.sub( + matcher, get_condition(obj, "precondition") + ) + post_condition = RE_DESCR_DELETED_PATTERN.sub( + matcher, get_condition(obj, "postcondition") + ) + additional = { + "preCondition": {"type": "text/html", "value": pre_condition}, + "postCondition": {"type": "text/html", "value": post_condition}, + } + return attributes | {"additional_attributes": additional} + + +def component_or_actor( + obj: cs.Component, ctx: dict[str, t.Any] +) -> dict[str, t.Any]: + """Return attributes for a ``Component``.""" + attributes = _generic_attributes(obj, ctx) + if obj.is_actor: + xtype = RE_CAMEL_CASE_2ND_WORD_PATTERN.sub( + r"\1Actor", type(obj).__name__ + ) + attributes["type"] = helpers.resolve_element_type(xtype) + return attributes + + +def physical_component( + obj: pa.PhysicalComponent, ctx: dict[str, t.Any] +) -> dict[str, t.Any]: + """Return attributes for a ``PhysicalComponent``.""" + attributes = component_or_actor(obj, ctx) + xtype = attributes["type"] + if obj.nature is not None: + attributes["type"] = f"{xtype}{obj.nature.name.capitalize()}" + return attributes + + +SERIALIZERS = { + "OperationalCapability": operational_capability, + "LogicalComponent": component_or_actor, + "SystemComponent": component_or_actor, + "PhysicalComponent": physical_component, +} diff --git a/capella2polarion/polarion_api/__init__.py b/capella2polarion/polarion_api/__init__.py new file mode 100644 index 00000000..9afa36a1 --- /dev/null +++ b/capella2polarion/polarion_api/__init__.py @@ -0,0 +1,280 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Polarion API module with data classes and an abstract API client.""" +import abc +import dataclasses +import typing as t + + +@dataclasses.dataclass +class WorkItem: + """A data class containing all relevant data of a Polarion WorkItem.""" + + id: str | None = None + title: str | None = None + description_type: str | None = None + description: str | None = None + type: str | None = None + uuid_capella: str | None = None + status: str | None = None + additional_attributes: dict[str, t.Any] = dataclasses.field( + default_factory=dict + ) + + +@dataclasses.dataclass +class WorkItemLink: + """A link between multiple Polarion WorkItems. + + The primary_work_item_id is the ID of the owner of the link, the + secondary_work_item_id represents the linked workitem. 
+ """ + + primary_work_item_id: str + secondary_work_item_id: str + role: str + suspect: bool | None = None + secondary_work_item_project: str | None = ( + None # Use to set differing project + ) + + +class PolarionApiBaseException(Exception): + """Base exception, which is raised, if an API error occurs.""" + + +class PolarionApiException(PolarionApiBaseException): + """Exception, which is raised, if an error is raised by the API.""" + + +class PolarionApiUnexpectedException(PolarionApiBaseException): + """Exception, which is raised, if an unexpected error is raised.""" + + +class AbstractPolarionProjectApi(abc.ABC): + """An abstract base class for a Polarion API client.""" + + capella_uuid_attribute: str + delete_polarion_work_items: bool + project_id: str + delete_status: str = "deleted" + _page_size: int = 100 + _batch_size: int = 5 + + @abc.abstractmethod + def project_exists(self) -> bool: + """Return True if self.project_id exists and False if not.""" + raise NotImplementedError + + def get_work_item_element_mapping( + self, work_item_types: list[str] + ) -> dict[str, str]: + """Return a mapping of capella_uuid to work item id. + + Will be generated for all work items of the given + work_item_types. + """ + work_item_mapping: dict[str, str] = {} + _type = " ".join(work_item_types) + for work_item in self.get_all_work_items( + f"type:({_type})", + {"work_items": f"id,{self.capella_uuid_attribute}"}, + ): + if work_item.id is not None and work_item.uuid_capella is not None: + work_item_mapping[work_item.uuid_capella] = work_item.id + + return work_item_mapping + + def _request_all_items(self, call: t.Callable, **kwargs) -> list[t.Any]: + page = 1 + items, next_page = call( + **kwargs, page_size=self._page_size, page_number=page + ) + while next_page: + page += 1 + _items, next_page = call( + **kwargs, page_size=self._page_size, page_number=page + ) + items += _items + return items + + def get_all_work_items( + self, query: str, fields: dict[str, str] | None = None + ) -> list[WorkItem]: + """Get all work items matching the given query. + + Will handle pagination automatically. Define a fields dictionary + as described in the Polarion API documentation to get certain + fields. + """ + return self._request_all_items( + self.get_work_items, fields=fields, query=query + ) + + @abc.abstractmethod + def get_work_items( + self, + query: str, + fields: dict[str, str] | None = None, + page_size: int = 100, + page_number: int = 1, + ) -> tuple[list[WorkItem], bool]: + """Return the work items on a defined page matching the given query. + + In addition, a flag whether a next page is available is + returned. Define a fields dictionary as described in the + Polarion API documentation to get certain fields. + """ + raise NotImplementedError + + def create_work_item(self, work_item: WorkItem): + """Create a single given work item.""" + return self.create_work_items([work_item]) + + def create_work_items(self, work_items: list[WorkItem]): + """Create the given list of work items.""" + for i in range(0, len(work_items), self._batch_size): + self._create_work_items(work_items[i : i + self._batch_size]) + + @abc.abstractmethod + def _create_work_items(self, work_items: list[WorkItem]): + """Create the given list of work items. + + A maximum of 5 items is allowed only at once. 
+ """ + raise NotImplementedError + + def delete_work_item(self, work_item_id: str): + """Delete or mark the defined work item as deleted.""" + return self.delete_work_items([work_item_id]) + + def delete_work_items(self, work_item_ids: list[str]): + """Delete or mark the defined work items as deleted.""" + if self.delete_polarion_work_items: + return self._delete_work_items(work_item_ids) + return self._mark_delete_work_items(work_item_ids) + + @abc.abstractmethod + def _delete_work_items(self, work_item_ids: list[str]): + """Actually perform a delete request for the given work items.""" + raise NotImplementedError + + def _mark_delete_work_items(self, work_item_ids: list[str]): + """Set the status for all given work items to self.delete_status.""" + for work_item_id in work_item_ids: + self.update_work_item( + WorkItem(id=work_item_id, status=self.delete_status) + ) + + @abc.abstractmethod + def update_work_item(self, work_item: WorkItem): + """Update the given work item in Polarion. + + Only fields not set to None will be updated in Polarion. None + fields will stay untouched. + """ + raise NotImplementedError + + def get_all_work_item_links( + self, + work_item_id: str, + fields: dict[str, str] | None = None, + include: str | None = None, + ) -> list[WorkItemLink]: + """Get all work item links for the given work item. + + Define a fields dictionary as described in the Polarion API + documentation to get certain fields. + """ + return self._request_all_items( + self.get_work_item_links, + work_item_id=work_item_id, + fields=fields, + include=include, + ) + + @abc.abstractmethod + def get_work_item_links( + self, + work_item_id: str, + fields: dict[str, str] | None = None, + include: str | None = None, + page_size: int = 100, + page_number: int = 1, + ) -> tuple[list[WorkItemLink], bool]: + """Get the work item links for the given work item on a page. + + In addition, a flag whether a next page is available is + returned. Define a fields dictionary as described in the + Polarion API documentation to get certain fields. + """ + raise NotImplementedError + + def create_work_item_links(self, work_item_links: list[WorkItemLink]): + """Create the links between the work items in work_item_links.""" + for split_work_item_links in self._group_links( + work_item_links + ).values(): + for i in range(0, len(split_work_item_links), self._batch_size): + self._create_work_item_links( + split_work_item_links[i : i + self._batch_size] + ) + + @abc.abstractmethod + def _create_work_item_links(self, work_item_links: list[WorkItemLink]): + """Create the links between the work items in work_item_links. + + All work item links must have the same primary work item. + """ + raise NotImplementedError + + def _set_project(self, work_item_link: WorkItemLink): + if work_item_link.secondary_work_item_project is None: + work_item_link.secondary_work_item_project = self.project_id + + def _group_links( + self, + work_item_links: list[WorkItemLink], + ) -> dict[str, list[WorkItemLink]]: + """Group a list of work item links by their primary work item. + + Returns a dict with the primary work items as keys. 
+ """ + work_item_link_dict: dict[str, list[WorkItemLink]] = {} + for work_item_link in work_item_links: + self._set_project(work_item_link) + if work_item_link.primary_work_item_id not in work_item_link_dict: + work_item_link_dict[work_item_link.primary_work_item_id] = [] + + work_item_link_dict[work_item_link.primary_work_item_id].append( + work_item_link + ) + return work_item_link_dict + + def create_work_item_link(self, work_item_link: WorkItemLink): + """Create the link between the work items in work_item_link.""" + self._set_project(work_item_link) + self._create_work_item_links([work_item_link]) + + def delete_work_item_links(self, work_item_links: list[WorkItemLink]): + """Delete the links between the work items in work_item_link.""" + for split_work_item_links in self._group_links( + work_item_links + ).values(): + self._delete_work_item_links(split_work_item_links) + + @abc.abstractmethod + def _delete_work_item_links(self, work_item_links: list[WorkItemLink]): + """Delete the links between the work items in work_item_link. + + All work item links have to have the same primary work item. + """ + raise NotImplementedError + + def delete_work_item_link(self, work_item_link: WorkItemLink): + """Delete the links between the work items in work_item_link.""" + self._set_project(work_item_link) + self._delete_work_item_links([work_item_link]) + + +from client import * diff --git a/capella2polarion/polarion_api/client.py b/capella2polarion/polarion_api/client.py new file mode 100644 index 00000000..7ac4a55c --- /dev/null +++ b/capella2polarion/polarion_api/client.py @@ -0,0 +1,356 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""The actual implementation of the API client using an OpenAPIClient.""" +import json +import logging + +from polarion_rest_api_client import client as oa_client +from polarion_rest_api_client import models as api_models +from polarion_rest_api_client import types as oa_types +from polarion_rest_api_client.api.linked_work_items import ( + delete_linked_work_items, + get_linked_work_items, + post_linked_work_items, +) +from polarion_rest_api_client.api.projects import get_project +from polarion_rest_api_client.api.work_items import ( + delete_work_items, + get_work_items, + patch_work_item, + post_work_items, +) + +from capella2polarion.polarion_api import ( + AbstractPolarionProjectApi, + PolarionApiException, + PolarionApiUnexpectedException, + WorkItem, + WorkItemLink, +) + +logger = logging.getLogger(__name__) + + +def _build_sparse_fields( + fields_dict: dict[str, str] | None +) -> api_models.SparseFields | oa_types.Unset: + """Build the SparseFields object based on a dict. + + Ensure that every key follow the pattern 'fields[XXX]'. 
+ """ + if fields_dict is None: + return oa_types.Unset() + new_field_dict: dict[str, str] = {} + for key, value in fields_dict.items(): + if key.startswith("fields["): + new_field_dict[key] = value + else: + new_field_dict[f"fields[{key}]"] = value + return api_models.SparseFields.from_dict(new_field_dict) + + +class OpenAPIPolarionProjectClient(AbstractPolarionProjectApi): + """A Polarion Project Client using an auto generated OpenAPI-Client.""" + + client: oa_client.AuthenticatedClient + + def __init__( + self, + project_id: str, + capella_uuid_attribute: str, + delete_polarion_work_items: bool, + polarion_api_endpoint: str, + polarion_access_token: str, + batch_size: int = 5, + page_size: int = 100, + ): + """Initialize the client for project and endpoint using a token.""" + self.project_id = project_id + self.capella_uuid_attribute = capella_uuid_attribute + self.delete_polarion_work_items = delete_polarion_work_items + self.client = oa_client.AuthenticatedClient( + polarion_api_endpoint, polarion_access_token + ) + self._batch_size = batch_size + self._page_size = page_size + + def _check_response(self, response: oa_types.Response): + if response.status_code in range(400, 600): + try: + error = api_models.Errors.from_dict( + json.loads(response.content.decode()) + ) + raise PolarionApiException( + *[(e.status, e.detail) for e in error.errors] + ) + except json.JSONDecodeError as error: + raise PolarionApiUnexpectedException( + response.status_code, response.content + ) from error + + def _build_work_item_post_request( + self, work_item: WorkItem + ) -> api_models.WorkitemsListPostRequestDataItem: + attrs = api_models.WorkitemsListPostRequestDataItemAttributes( + work_item.type, + api_models.WorkitemsListPostRequestDataItemAttributesDescription( + api_models.WorkitemsListPostRequestDataItemAttributesDescriptionType( + work_item.description_type + ), + work_item.description, + ), + status=work_item.status, + title=work_item.title, + ) + + attrs.additional_properties.update(work_item.additional_attributes) + + if work_item.uuid_capella is not None: + attrs.additional_properties[ + self.capella_uuid_attribute + ] = work_item.uuid_capella + + return api_models.WorkitemsListPostRequestDataItem( + api_models.WorkitemsListPostRequestDataItemType.WORKITEMS, attrs + ) + + def _build_work_item_patch_request( + self, work_item: WorkItem + ) -> api_models.WorkitemsSinglePatchRequest: + attrs = api_models.WorkitemsSinglePatchRequestDataAttributes() + + if work_item.title is not None: + attrs.title = work_item.title + + if work_item.description is not None: + attrs.description = api_models.WorkitemsSinglePatchRequestDataAttributesDescription( + api_models.WorkitemsSinglePatchRequestDataAttributesDescriptionType( + work_item.description_type + ), + work_item.description, + ) + + if work_item.status is not None: + attrs.status = work_item.status + + if work_item.additional_attributes is not None: + attrs.additional_properties.update(work_item.additional_attributes) + + if work_item.uuid_capella is not None: + attrs.additional_properties[ + self.capella_uuid_attribute + ] = work_item.uuid_capella + + return api_models.WorkitemsSinglePatchRequest( + api_models.WorkitemsSinglePatchRequestData( + api_models.WorkitemsSinglePatchRequestDataType.WORKITEMS, + f"{self.project_id}/{work_item.id}", + attrs, + ) + ) + + def project_exists(self) -> bool: + """Return True if self.project_id exists and False if not.""" + response = get_project.sync_detailed( + self.project_id, client=self.client + ) + if not 
response.status_code == 200: + logger.error("Polarion request: %s", response.content) + return False + return True + + def get_work_items( + self, + query: str, + fields: dict[str, str] | None = None, + page_size: int = 100, + page_number: int = 1, + ) -> tuple[list[WorkItem], bool]: + """Return the work items on a defined page matching the given query. + + In addition, a flag whether a next page is available is + returned. Define a fields dictionary as described in the + Polarion API documentation to get certain fields. + """ + fields = _build_sparse_fields(fields) + response = get_work_items.sync_detailed( + self.project_id, + client=self.client, + fields=fields, + query=query, + pagesize=page_size, + pagenumber=page_number, + ) + + self._check_response(response) + + work_items_response = response.parsed + + work_items: list[WorkItem] = [] + for work_item in work_items_response.data: + if not getattr(work_item.meta, "errors", []): + work_items.append( + WorkItem( + work_item.id.split("/")[-1], + work_item.attributes.title, + str(work_item.attributes.description.type) + if work_item.attributes.description + else None, + work_item.attributes.description.value + if work_item.attributes.description + else None, + work_item.attributes.type, + work_item.attributes.additional_properties[ + self.capella_uuid_attribute + ], + work_item.attributes.status, + work_item.attributes.additional_properties, + ) + ) + + return work_items, bool(work_items_response.links.next_) + + def _create_work_items(self, work_items: list[WorkItem]): + """Create the given list of work items.""" + response = post_work_items.sync_detailed( + self.project_id, + client=self.client, + json_body=api_models.WorkitemsListPostRequest( + [ + self._build_work_item_post_request(work_item) + for work_item in work_items + ] + ), + ) + + self._check_response(response) + + def _delete_work_items(self, work_item_ids: list[str]): + response = delete_work_items.sync_detailed( + self.project_id, + client=self.client, + json_body=api_models.WorkitemsListDeleteRequest( + [ + api_models.WorkitemsListDeleteRequestDataItem( + api_models.WorkitemsListDeleteRequestDataItemType.WORKITEMS, + f"{self.project_id}/{work_item_id}", + ) + for work_item_id in work_item_ids + ] + ), + ) + + self._check_response(response) + + def update_work_item(self, work_item: WorkItem): + """Update the given work item in Polarion. + + Only fields not set to None will be updated in Polarion. None + fields will stay untouched. + """ + response = patch_work_item.sync_detailed( + self.project_id, + work_item.id, + client=self.client, + json_body=self._build_work_item_patch_request(work_item), + ) + + self._check_response(response) + + def get_work_item_links( + self, + work_item_id: str, + fields: dict[str, str] | None = None, + include: str | None = None, + page_size: int = 100, + page_number: int = 1, + ) -> tuple[list[WorkItemLink], bool]: + """Get the work item links for the given work item on a page. + + In addition, a flag whether a next page is available is + returned. Define a fields dictionary as described in the + Polarion API documentation to get certain fields. 
+ """ + if fields is None: + fields = {"linkedworkitems": "id,role,suspect"} + + fields = _build_sparse_fields(fields) + response = get_linked_work_items.sync_detailed( + self.project_id, + work_item_id, + client=self.client, + fields=fields, + include=include, + pagesize=page_size, + pagenumber=page_number, + ) + + self._check_response(response) + + work_item_links: list[WorkItemLink] = [] + for link in response.parsed.data: + info = link.id.split("/") + assert len(info) == 5 + role_id, target_project_id, linked_work_item_id = info[2:] + suspect = link.attributes.suspect + if isinstance(suspect, oa_types.Unset): + suspect = False + + work_item_links.append( + WorkItemLink( + work_item_id, + linked_work_item_id, + role_id, + suspect, + target_project_id, + ) + ) + + return work_item_links, bool(response.parsed.links.next_) + + def _create_work_item_links(self, work_item_links: list[WorkItemLink]): + response = post_linked_work_items.sync_detailed( + self.project_id, + work_item_links[0].primary_work_item_id, + client=self.client, + json_body=api_models.LinkedworkitemsListPostRequest( + [ + api_models.LinkedworkitemsListPostRequestDataItem( + api_models.LinkedworkitemsListPostRequestDataItemType.LINKEDWORKITEMS, + api_models.LinkedworkitemsListPostRequestDataItemAttributes( + role=work_item_link.role, + suspect=work_item_link.suspect or False, + ), + api_models.LinkedworkitemsListPostRequestDataItemRelationships( + api_models.LinkedworkitemsListPostRequestDataItemRelationshipsWorkItem( + api_models.LinkedworkitemsListPostRequestDataItemRelationshipsWorkItemData( + api_models.LinkedworkitemsListPostRequestDataItemRelationshipsWorkItemDataType.WORKITEMS, + f"{work_item_link.secondary_work_item_project}/{work_item_link.secondary_work_item_id}", + ) + ) + ), + ) + for work_item_link in work_item_links + ] + ), + ) + + self._check_response(response) + + def _delete_work_item_links(self, work_item_links: list[WorkItemLink]): + response = delete_linked_work_items.sync_detailed( + self.project_id, + work_item_links[0].primary_work_item_id, + client=self.client, + json_body=api_models.LinkedworkitemsListDeleteRequest( + [ + api_models.LinkedworkitemsListDeleteRequestDataItem( + api_models.LinkedworkitemsListDeleteRequestDataItemType.LINKEDWORKITEMS, + f"{self.project_id}/{work_item_link.primary_work_item_id}/{work_item_link.role}/{work_item_link.secondary_work_item_project}/{work_item_link.secondary_work_item_id}", + ) + for work_item_link in work_item_links + ] + ), + ) + + self._check_response(response) diff --git a/ci-templates/gitlab/synchronise_diagrams.yml b/ci-templates/gitlab/synchronise_diagrams.yml new file mode 100644 index 00000000..e05cae29 --- /dev/null +++ b/ci-templates/gitlab/synchronise_diagrams.yml @@ -0,0 +1,20 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +variables: + CAPELLA2POLARION_DEBUG: "1" + +capella2polarion_synchronise_diagrams: + needs: + - job: update_capella_diagram_cache + artifacts: true + + script: + - pip install git+https://git.tech.rz.db.de/se-toolchain/capella-tools/rm-bridge/capella2polarion.git@$CAPELLA2POLARION_REVISION + - > + python \ + -m capella2polarion \ + $([[ $CAPELLA2POLARION_DEBUG -eq 1 ]] && echo '--debug') \ + --project-id=${CAPELLA2POLARION_PROJECT_ID} \ + diagrams \ + ./diagram_cache diff --git a/ci-templates/gitlab/synchronise_elements.yml b/ci-templates/gitlab/synchronise_elements.yml new file mode 100644 index 00000000..35189828 --- /dev/null +++ b/ci-templates/gitlab/synchronise_elements.yml @@ -0,0 
+1,25 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +variables: + CAPELLA2POLARION_DEBUG: "1" + +capella2polarion_synchronise_elements: + needs: + - job: capella2polarion_synchronise_diagrams + optional: True + + script: + - pip install git+https://git.tech.rz.db.de/se-toolchain/capella-tools/rm-bridge/capella2polarion.git@$CAPELLA2POLARION_REVISION + - > + python \ + -m capella2polarion \ + $([[ $CAPELLA2POLARION_DEBUG -eq 1 ]] && echo '--debug') \ + --project-id=${CAPELLA2POLARION_PROJECT_ID:?} \ + model-elements \ + "${CAPELLA2POLARION_MODEL_JSON:?}" \ + ${CAPELLA2POLARION_CONFIG:?} + + artifacts: + paths: + - elements_index.yaml diff --git a/docs/Makefile b/docs/Makefile new file mode 100644 index 00000000..273f24fc --- /dev/null +++ b/docs/Makefile @@ -0,0 +1,31 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: CC0-1.0 + +# Minimal makefile for Sphinx documentation +# + +# You can set these variables from the command line, and also +# from the environment for the first two. +SPHINXOPTS ?= +SPHINXBUILD ?= sphinx-build +SOURCEDIR = source +BUILDDIR = build + +# Put it first so that "make" without argument is like "make help". +help: + @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) + +.PHONY: help Makefile + +# Auto-generate API documentation +apidoc: + sphinx-apidoc --module-first --output-dir source/code --force .. + +.PHONY: apidoc + +html: apidoc + +# Catch-all target: route all unknown targets to Sphinx using the new +# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). +%: Makefile + @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) diff --git a/docs/make.bat b/docs/make.bat new file mode 100644 index 00000000..ab614db8 --- /dev/null +++ b/docs/make.bat @@ -0,0 +1,37 @@ +@ECHO OFF +REM Copyright DB Netz AG and contributors +REM SPDX-License-Identifier: CC0-1.0 + +pushd %~dp0 + +REM Command file for Sphinx documentation + +if "%SPHINXBUILD%" == "" ( + set SPHINXBUILD=sphinx-build +) +set SOURCEDIR=source +set BUILDDIR=build + +%SPHINXBUILD% >NUL 2>NUL +if errorlevel 9009 ( + echo. + echo.The 'sphinx-build' command was not found. Make sure you have Sphinx + echo.installed, then set the SPHINXBUILD environment variable to point + echo.to the full path of the 'sphinx-build' executable. Alternatively you + echo.may add the Sphinx directory to PATH. + echo. + echo.If you don't have Sphinx installed, grab it from + echo.https://www.sphinx-doc.org/ + exit /b 1 +) + +if "%1" == "" goto help + +%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% +goto end + +:help +%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% + +:end +popd diff --git a/docs/source/_static/github-logo.svg b/docs/source/_static/github-logo.svg new file mode 100644 index 00000000..a407b96c --- /dev/null +++ b/docs/source/_static/github-logo.svg @@ -0,0 +1,9 @@ + + + + + diff --git a/docs/source/conf.py b/docs/source/conf.py new file mode 100644 index 00000000..c79b44ec --- /dev/null +++ b/docs/source/conf.py @@ -0,0 +1,117 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 +"""Configuration file for Sphinx.""" + + +# -- Path setup -------------------------------------------------------------- + +# If extensions (or modules to document with autodoc) are in another directory, +# add these directories to sys.path here. If the directory is relative to the +# documentation root, use os.path.abspath to make it absolute, like shown here. 
+# +import os +import sys + +sys.path.insert(0, os.path.abspath("../..")) + +import capella2polarion + +# -- Project information ----------------------------------------------------- + +try: + import tomllib +except ImportError: + import tomli as tomllib # type: ignore[no-redef] +with open("../../pyproject.toml", "rb") as f: + _metadata = tomllib.load(f)["project"] + +project = "capella2polarion" +author = _metadata["authors"][0]["name"] +copyright = f"{author} and the {_metadata['name']} contributors" + + +# -- General configuration --------------------------------------------------- + +# Add any Sphinx extension module names here, as strings. They can be +# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom +# ones. +extensions = [ + "sphinx.ext.autodoc", + "sphinx.ext.intersphinx", + "sphinx.ext.napoleon", + "sphinx_copybutton", +] + +# Add any paths that contain templates here, relative to this directory. +templates_path = ["_templates"] + +# List of patterns, relative to source directory, that match files and +# directories to ignore when looking for source files. +# This pattern also affects html_static_path and html_extra_path. +# exclude_patterns = [] + + +# -- General information about the project ----------------------------------- + +# The version info for the project you're documenting, acts as replacement for +# |version| and |release|, also used in various other places throughout the +# built documents. + +# The full version, including alpha/beta/rc tags. +version = capella2polarion.__version__ +rst_epilog = """ +.. |Project| replace:: {project} +.. |Version| replace:: {version} +""".format( + project=project, version=version +) + + +# -- Options for copy-button ------------------------------------------------- +copybutton_here_doc_delimiter = "EOT" +copybutton_line_continuation_character = "\\" + + +# -- Options for auto-doc ---------------------------------------------------- +autoclass_content = "class" + + +# -- Options for napoleon ---------------------------------------------------- +napoleon_google_docstring = False +napoleon_include_init_with_doc = True + + +# -- Options for Intersphinx output ------------------------------------------ +intersphinx_mapping = { + "python": ("https://docs.python.org/3", None), +} + + +# -- Options for HTML output ------------------------------------------------- + +# The theme to use for HTML and HTML Help pages. See the documentation for +# a list of builtin themes. + +html_theme = "furo" +html_theme_options = { + "footer_icons": [ + { + "name": "GitHub", + "url": "https://github.com/DSD-DBS/capella2polarion", + "html": '', + "class": "", + }, + ], +} + + +# -- Extra options for Furo theme -------------------------------------------- + +pygments_style = "tango" +pygments_dark_style = "monokai" + + +# Add any paths that contain custom static files (such as style sheets) here, +# relative to this directory. They are copied after the builtin static files, +# so a file named "default.css" will overwrite the builtin "default.css". +html_static_path = ["_static"] diff --git a/docs/source/configuration.rst b/docs/source/configuration.rst new file mode 100644 index 00000000..eb83c638 --- /dev/null +++ b/docs/source/configuration.rst @@ -0,0 +1,45 @@ +.. + Copyright DB Netz AG and contributors + SPDX-License-Identifier: Apache-2.0 + +.. _config: + +YAML +==== +To control the migration of model elements, you can use the following YAML +file. In this file, you can specify the layer and class type for matching +objects. 
If an item is a dictionary, it means there are work item links to be +migrated. Make sure to use the attribute names on the capellambse object +correctly. + +.. literalinclude:: ../../tests/data/model_elements/config.yaml + :language: yaml + :lines: 4- + +The first section is a general configuration where you can set links to be +migrated for all class types. For example, ``parent`` and ``description_reference`` +are links that will be applied to all specified class types. Since ``Class`` is a +common class type that exists in all layers, links specific to ``Class`` can be +specified here to avoid duplication. + +Polarion +======== +In general, if an attribute is not configured, it will still be accepted and +created via the Rest API. However, to be able to make ``GET`` requests, you +need to configure your Polarion project correctly. The matching of diagrams and +model elements is done using the ``uuid_capella`` attribute, which needs to be +declared in the ``Custom Fields`` section. Simply choose ``All Types`` for this +attribute. + +To have icons for your model elements, you need to declare the work item type +in the ``workitem-type-enum.xml`` file in the Polarion administration panel. +This file is an enumeration file where the work item type IDs should follow the +camel case pattern (e.g., ``operationalCapability`` for +``OperationalCapability``). + +To generate clickable linked work items, you need to configure the link role +enumerations in the ``workitem-link-role-enum.xml`` file. Here, the ID should +match the attributes of the capellambse object (e.g., ``involved_activities``), +or you can define custom attributes that require custom code implementation +(e.g., ``description_reference`` links for references to objects in the +description). diff --git a/docs/source/index.rst b/docs/source/index.rst new file mode 100644 index 00000000..87164178 --- /dev/null +++ b/docs/source/index.rst @@ -0,0 +1,35 @@ +.. + Copyright DB Netz AG and contributors + SPDX-License-Identifier: Apache-2.0 + +capella2polarion +================ + +A tool to migrate Capella content to a Polarion project as work items. + +Diagrams +-------- +Migrate diagrams from a diagram cache (pipeline artifact from a capella diagram +cache) job run to Polarion as work items. The whole folder with the +``index.json`` and the SVGs is needed. + +Model-Elements +-------------- +Migrate any model element from a ``capellambse.MelodyModel`` to Polarion as a +work item. With appropriate :ref:`configuration ` on Polarion and an +according config YAML file, any attribute on the capellambse object can be +migrated as a work item link if (and only if) the target object exists as a +work item already. In order to generate diagram references, make sure to +execute the model-elements migration after the diagram migration. + +.. toctree:: + :maxdepth: 2 + :caption: Configuration: + + configuration + +.. 
toctree:: + :maxdepth: 3 + :caption: API reference + + code/modules diff --git a/git-conventional-commits.json b/git-conventional-commits.json new file mode 100644 index 00000000..525cbf0e --- /dev/null +++ b/git-conventional-commits.json @@ -0,0 +1,18 @@ +{ + "convention" : { + "commitTypes": [ + "build", + "chore", + "ci", + "docs", + "feat", + "fix", + "merge", + "perf", + "refactor", + "revert", + "test" + ], + "commitScopes": [] + } +} diff --git a/git-conventional-commits.json.license b/git-conventional-commits.json.license new file mode 100644 index 00000000..95e8b6e6 --- /dev/null +++ b/git-conventional-commits.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: CC0-1.0 diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 00000000..2f81de43 --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,197 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +[build-system] +requires = ["setuptools>=64", "setuptools_scm[toml]>=3.4", "wheel"] +build-backend = "setuptools.build_meta" + +[project] +dynamic = ["version"] + +name = "capella2polarion" +description = "Synchronise Capella models with Polarion projects" +readme = "README.md" +requires-python = ">=3.9, <3.12" +license = { text = "Apache-2.0" } +authors = [ + { name = "DB Netz AG" }, +] +keywords = [] +classifiers = [ + "Development Status :: 1 - Planning", + "License :: OSI Approved :: Apache Software License", + "Natural Language :: English", + "Operating System :: OS Independent", + "Programming Language :: Python :: 3 :: Only", + "Programming Language :: Python :: 3.9", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", +] +dependencies = [ + "capellambse", + "click", + "PyYAML", + "polarion-rest-api-client", + "requests", +] + +[project.urls] +Homepage = "https://github.com/DSD-DBS/capella2polarion" +Documentation = "https://dsd-dbs.github.io/capella2polarion" + +[project.optional-dependencies] +dev = [ + "python-dotenv" +] + +docs = [ + "furo", + "sphinx", + "sphinx-copybutton", + "tomli; python_version<'3.11'", +] + +test = [ + "pytest", + "pytest-cov", + "pytest-httpx", +] + +[tool.black] +line-length = 79 +target-version = ["py310"] + +[tool.docformatter] +wrap-descriptions = 72 +wrap-summaries = 79 + +[tool.isort] +profile = 'black' +line_length = 79 + +[tool.mypy] +check_untyped_defs = true +no_implicit_optional = true +show_error_codes = true +warn_redundant_casts = true +warn_unreachable = true +python_version = "3.10" + +[[tool.mypy.overrides]] +module = ["tests.*"] +allow_incomplete_defs = true +allow_untyped_defs = true + +[[tool.mypy.overrides]] +# Untyped third party libraries +module = [ + # ... 
+] +ignore_missing_imports = true + +[tool.pydocstyle] +convention = "numpy" +add-select = [ + "D212", # Multi-line docstring summary should start at the first line + "D402", # First line should not be the functionโ€™s โ€œsignatureโ€ + "D417", # Missing argument descriptions in the docstring +] +add-ignore = [ + "D201", # No blank lines allowed before function docstring # auto-formatting + "D202", # No blank lines allowed after function docstring # auto-formatting + "D203", # 1 blank line required before class docstring # auto-formatting + "D204", # 1 blank line required after class docstring # auto-formatting + "D211", # No blank lines allowed before class docstring # auto-formatting + "D213", # Multi-line docstring summary should start at the second line +] + +[tool.pylint.master] +extension-pkg-allow-list = [ + "lxml.etree", +] +max-line-length = 79 + +[tool.pylint.messages_control] +disable = [ + "broad-except", + "global-statement", + "import-outside-toplevel", + "invalid-name", + "missing-class-docstring", + "missing-function-docstring", + "missing-module-docstring", + "no-else-break", + "no-else-continue", + "no-else-raise", + "no-else-return", + "protected-access", + "redefined-builtin", + "too-few-public-methods", + "too-many-ancestors", + "too-many-arguments", + "too-many-boolean-expressions", + "too-many-branches", + "too-many-instance-attributes", + "too-many-lines", + "too-many-locals", + "too-many-public-methods", + "too-many-return-statements", + "too-many-statements", + + # Auto-formatting + "bad-indentation", + "inconsistent-quotes", + "missing-final-newline", + "mixed-line-endings", + "multiple-imports", + "multiple-statements", + "trailing-newlines", + "trailing-whitespace", + "unexpected-line-ending-format", + "ungrouped-imports", + "wrong-import-order", + "wrong-import-position", + + # Handled by mypy + "arguments-differ", + "assignment-from-no-return", + "import-error", + "missing-kwoa", + "no-member", + "no-value-for-parameter", + "redundant-keyword-arg", + "signature-differs", + "syntax-error", + "too-many-function-args", + "unbalanced-tuple-unpacking", + "undefined-variable", + "unexpected-keyword-arg", +] +enable = [ + "c-extension-no-member", + "deprecated-pragma", + "use-symbolic-message-instead", + "useless-suppression", +] + +[tool.pytest.ini_options] +addopts = """ + --import-mode=importlib + --strict-config + --strict-markers +""" +testpaths = ["tests"] +xfail_strict = true + +[tool.setuptools] +platforms = ["any"] +zip-safe = false + +[tool.setuptools.package-data] +"*" = ["py.typed"] + +[tool.setuptools.packages.find] +exclude = ["LICENSES"] + +[tool.setuptools_scm] +# This section must exist for setuptools_scm to work diff --git a/tests/conftest.py b/tests/conftest.py new file mode 100644 index 00000000..163144cc --- /dev/null +++ b/tests/conftest.py @@ -0,0 +1,31 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import json +import pathlib +import typing as t + +import capellambse +import pytest + +TEST_DATA_ROOT = pathlib.Path(__file__).parent / "data" +TEST_DIAGRAM_CACHE = TEST_DATA_ROOT / "diagram_cache" +TEST_MODEL_ELEMENTS = TEST_DATA_ROOT / "model_elements" +TEST_MODEL_ELEMENTS_CONFIG = TEST_MODEL_ELEMENTS / "config.yaml" +TEST_MODEL = TEST_DATA_ROOT / "model" / "Melody Model Test.aird" +TEST_HOST = "https://api.example.com" + + +@pytest.fixture +def diagram_cache_index() -> list[dict[str, t.Any]]: + """Return the test diagram cache index.""" + path = TEST_DIAGRAM_CACHE / 
"index.json" + return json.loads(path.read_text(encoding="utf8")) + + +@pytest.fixture +def model() -> capellambse.MelodyModel: + """Return the test model.""" + return capellambse.MelodyModel(path=TEST_MODEL) diff --git a/tests/data/diagram_cache/_6Td1kOQ8Ee2tXvmHzHzXCA.svg b/tests/data/diagram_cache/_6Td1kOQ8Ee2tXvmHzHzXCA.svg new file mode 100644 index 00000000..b3106e05 --- /dev/null +++ b/tests/data/diagram_cache/_6Td1kOQ8Ee2tXvmHzHzXCA.svg @@ -0,0 +1,171 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + Capability + + Capsoup + + Not secret + + Top secret + + Kevin Spacey + + SystemActor + + Gerard Butler + + + + + + ยซ e ยป + + + + + + + ยซ i ยป + + + + + + + + + + + + + diff --git a/tests/data/diagram_cache/_Eiw7IOQ9Ee2tXvmHzHzXCA.svg b/tests/data/diagram_cache/_Eiw7IOQ9Ee2tXvmHzHzXCA.svg new file mode 100644 index 00000000..f43940e2 --- /dev/null +++ b/tests/data/diagram_cache/_Eiw7IOQ9Ee2tXvmHzHzXCA.svg @@ -0,0 +1,663 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + 2nd Specialization of + SuperClass + + & % + [1..*] /num_of_things : Integer + + [1..*] temperature : + Float + + class_typed : 3rd + specialization of SuperClass + + + + + + + SuperClass + + is_amazing : + Boolean + + name : String + + + + + + + 1st Specialization of + SuperClass + + + + + + + 3rd specialization of + SuperClass + + + + + ยฐC + + + + + + ObjectDescription + + class : ObjectType + + classification_confidence + : Float + + [3] relative_position : + Float + + [1..*] possible_names : + String + + + + + + + ObjectType + + HUMAN + + ANIMAL + + UNKNOWN + + + + + m + + + + NumericRefere + nce 2 -> + <undefined> + + + + + + Waypoint + + lat : Float + + lon : Float + + alt : Float + + + + + + + Trajectory + + + + + + + + + + + + [1..*] waypoints + + + + + + + diff --git a/tests/data/diagram_cache/index.json b/tests/data/diagram_cache/index.json new file mode 100644 index 00000000..0e51fd36 --- /dev/null +++ b/tests/data/diagram_cache/index.json @@ -0,0 +1,12 @@ +[ + { + "uuid": "_6Td1kOQ8Ee2tXvmHzHzXCA", + "name": "[CC] Capability", + "success": true + }, + { + "uuid": "_Eiw7IOQ9Ee2tXvmHzHzXCA", + "name": "[CDB] Class tests", + "success": false + } +] diff --git a/tests/data/diagram_cache/index.json.license b/tests/data/diagram_cache/index.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/diagram_cache/index.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/delete_work_item.json b/tests/data/expected_requests/delete_work_item.json new file mode 100644 index 00000000..331e8193 --- /dev/null +++ b/tests/data/expected_requests/delete_work_item.json @@ -0,0 +1,8 @@ +{ + "data": [ + { + "type": "workitems", + "id": "PROJ/MyWorkItemId" + } + ] +} diff --git a/tests/data/expected_requests/delete_work_item.json.license 
b/tests/data/expected_requests/delete_work_item.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/delete_work_item.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/delete_work_item_link.json b/tests/data/expected_requests/delete_work_item_link.json new file mode 100644 index 00000000..febd200f --- /dev/null +++ b/tests/data/expected_requests/delete_work_item_link.json @@ -0,0 +1,8 @@ +{ + "data": [ + { + "type": "linkedworkitems", + "id": "PROJ/MyWorkItemId/parent/MyProjectId/MyWorkItemId2" + } + ] +} diff --git a/tests/data/expected_requests/delete_work_item_link.json.license b/tests/data/expected_requests/delete_work_item_link.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/delete_work_item_link.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/delete_work_item_link_2.json b/tests/data/expected_requests/delete_work_item_link_2.json new file mode 100644 index 00000000..5f049fff --- /dev/null +++ b/tests/data/expected_requests/delete_work_item_link_2.json @@ -0,0 +1,8 @@ +{ + "data": [ + { + "type": "linkedworkitems", + "id": "PROJ/MyWorkItemId2/parent/PROJ/MyWorkItemId3" + } + ] +} diff --git a/tests/data/expected_requests/delete_work_item_link_2.json.license b/tests/data/expected_requests/delete_work_item_link_2.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/delete_work_item_link_2.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/delete_work_item_links.json b/tests/data/expected_requests/delete_work_item_links.json new file mode 100644 index 00000000..d9883c8d --- /dev/null +++ b/tests/data/expected_requests/delete_work_item_links.json @@ -0,0 +1,12 @@ +{ + "data": [ + { + "type": "linkedworkitems", + "id": "PROJ/MyWorkItemId/parent/MyProjectId/MyWorkItemId2" + }, + { + "type": "linkedworkitems", + "id": "PROJ/MyWorkItemId/parent/PROJ/MyWorkItemId3" + } + ] +} diff --git a/tests/data/expected_requests/delete_work_item_links.json.license b/tests/data/expected_requests/delete_work_item_links.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/delete_work_item_links.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/delete_work_items.json b/tests/data/expected_requests/delete_work_items.json new file mode 100644 index 00000000..c65ed99b --- /dev/null +++ b/tests/data/expected_requests/delete_work_items.json @@ -0,0 +1,12 @@ +{ + "data": [ + { + "type": "workitems", + "id": "PROJ/MyWorkItemId" + }, + { + "type": "workitems", + "id": "PROJ/MyWorkItemId2" + } + ] +} diff --git a/tests/data/expected_requests/delete_work_items.json.license b/tests/data/expected_requests/delete_work_items.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/delete_work_items.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/patch_work_item_completely.json b/tests/data/expected_requests/patch_work_item_completely.json new file mode 100644 index 
00000000..13c4b4a5 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_completely.json @@ -0,0 +1,15 @@ +{ + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId", + "attributes": { + "description": { + "type": "text/html", + "value": "My text value" + }, + "title": "Title", + "status": "open", + "capella_uuid": "qwertz" + } + } +} diff --git a/tests/data/expected_requests/patch_work_item_completely.json.license b/tests/data/expected_requests/patch_work_item_completely.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_completely.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/patch_work_item_description.json b/tests/data/expected_requests/patch_work_item_description.json new file mode 100644 index 00000000..e36a0d75 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_description.json @@ -0,0 +1,12 @@ +{ + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId", + "attributes": { + "description": { + "type": "text/html", + "value": "My text value" + } + } + } +} diff --git a/tests/data/expected_requests/patch_work_item_description.json.license b/tests/data/expected_requests/patch_work_item_description.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_description.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/patch_work_item_status.json b/tests/data/expected_requests/patch_work_item_status.json new file mode 100644 index 00000000..a4e13864 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_status.json @@ -0,0 +1,9 @@ +{ + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId", + "attributes": { + "status": "open" + } + } +} diff --git a/tests/data/expected_requests/patch_work_item_status.json.license b/tests/data/expected_requests/patch_work_item_status.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_status.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/patch_work_item_status_deleted.json b/tests/data/expected_requests/patch_work_item_status_deleted.json new file mode 100644 index 00000000..f7397f8c --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_status_deleted.json @@ -0,0 +1,9 @@ +{ + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId", + "attributes": { + "status": "deleted" + } + } +} diff --git a/tests/data/expected_requests/patch_work_item_status_deleted.json.license b/tests/data/expected_requests/patch_work_item_status_deleted.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_status_deleted.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/patch_work_item_title.json b/tests/data/expected_requests/patch_work_item_title.json new file mode 100644 index 00000000..e797fadd --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_title.json @@ -0,0 +1,9 @@ +{ + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId", + "attributes": { + "title": "Title" + } + } +} diff --git 
a/tests/data/expected_requests/patch_work_item_title.json.license b/tests/data/expected_requests/patch_work_item_title.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/patch_work_item_title.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/post_work_item_link.json b/tests/data/expected_requests/post_work_item_link.json new file mode 100644 index 00000000..304b18f2 --- /dev/null +++ b/tests/data/expected_requests/post_work_item_link.json @@ -0,0 +1,19 @@ +{ + "data": [ + { + "type": "linkedworkitems", + "attributes": { + "role": "relates_to", + "suspect": true + }, + "relationships": { + "workItem": { + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId2" + } + } + } + } + ] +} diff --git a/tests/data/expected_requests/post_work_item_link.json.license b/tests/data/expected_requests/post_work_item_link.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/post_work_item_link.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/post_work_item_links.json b/tests/data/expected_requests/post_work_item_links.json new file mode 100644 index 00000000..f6a8ec99 --- /dev/null +++ b/tests/data/expected_requests/post_work_item_links.json @@ -0,0 +1,34 @@ +{ + "data": [ + { + "type": "linkedworkitems", + "attributes": { + "role": "relates_to", + "suspect": true + }, + "relationships": { + "workItem": { + "data": { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId2" + } + } + } + }, + { + "type": "linkedworkitems", + "attributes": { + "role": "parent", + "suspect": false + }, + "relationships": { + "workItem": { + "data": { + "type": "workitems", + "id": "PROJ/MyWorkItemId3" + } + } + } + } + ] +} diff --git a/tests/data/expected_requests/post_work_item_links.json.license b/tests/data/expected_requests/post_work_item_links.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/post_work_item_links.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/post_workitem.json b/tests/data/expected_requests/post_workitem.json new file mode 100644 index 00000000..bcf76efc --- /dev/null +++ b/tests/data/expected_requests/post_workitem.json @@ -0,0 +1,17 @@ +{ + "data": [ + { + "type": "workitems", + "attributes": { + "description": { + "type": "text/html", + "value": "My text value" + }, + "status": "open", + "title": "Title", + "type": "task", + "capella_uuid": "asdfg" + } + } + ] +} diff --git a/tests/data/expected_requests/post_workitem.json.license b/tests/data/expected_requests/post_workitem.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/post_workitem.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/expected_requests/post_workitems.json b/tests/data/expected_requests/post_workitems.json new file mode 100644 index 00000000..df26693b --- /dev/null +++ b/tests/data/expected_requests/post_workitems.json @@ -0,0 +1,43 @@ +{ + "data": [ + { + "type": "workitems", + "attributes": { + "description": { + "type": "text/html", + "value": "My text value" + }, + "status": "open", + "title": "Title", + "type": "task", + 
"capella_uuid": "asdfg" + } + }, + { + "type": "workitems", + "attributes": { + "description": { + "type": "text/html", + "value": "My text value" + }, + "status": "open", + "title": "Title", + "type": "task", + "capella_uuid": "asdfg" + } + }, + { + "type": "workitems", + "attributes": { + "description": { + "type": "text/html", + "value": "My text value" + }, + "status": "open", + "title": "Title", + "type": "task", + "capella_uuid": "asdfg" + } + } + ] +} diff --git a/tests/data/expected_requests/post_workitems.json.license b/tests/data/expected_requests/post_workitems.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/expected_requests/post_workitems.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/created_work_item_links.json b/tests/data/mock_api_responses/created_work_item_links.json new file mode 100644 index 00000000..c47d8377 --- /dev/null +++ b/tests/data/mock_api_responses/created_work_item_links.json @@ -0,0 +1,11 @@ +{ + "data": [ + { + "type": "linkedworkitems", + "id": "MyProjectId/MyWorkItemId/parent/MyProjectId/MyLinkedWorkItemId", + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId/MyLinkedWorkItemId?revision=1234" + } + } + ] +} diff --git a/tests/data/mock_api_responses/created_work_item_links.json.license b/tests/data/mock_api_responses/created_work_item_links.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/created_work_item_links.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/created_work_items.json b/tests/data/mock_api_responses/created_work_items.json new file mode 100644 index 00000000..810c93de --- /dev/null +++ b/tests/data/mock_api_responses/created_work_items.json @@ -0,0 +1,12 @@ +{ + "data": [ + { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId", + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId?revision=1234", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitem?id=MyWorkItemId&revision=1234" + } + } + ] +} diff --git a/tests/data/mock_api_responses/created_work_items.json.license b/tests/data/mock_api_responses/created_work_items.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/created_work_items.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/error.json b/tests/data/mock_api_responses/error.json new file mode 100644 index 00000000..2242271f --- /dev/null +++ b/tests/data/mock_api_responses/error.json @@ -0,0 +1,13 @@ +{ + "errors": [ + { + "status": "400", + "title": "Bad Request", + "detail": "Unexpected token, BEGIN_ARRAY expected, but was : BEGIN_OBJECT (at $.data)", + "source": { + "pointer": "$.data", + "parameter": "revision" + } + } + ] +} diff --git a/tests/data/mock_api_responses/error.json.license b/tests/data/mock_api_responses/error.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/error.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git 
a/tests/data/mock_api_responses/get_linked_work_items_next_page.json b/tests/data/mock_api_responses/get_linked_work_items_next_page.json new file mode 100644 index 00000000..cd09875c --- /dev/null +++ b/tests/data/mock_api_responses/get_linked_work_items_next_page.json @@ -0,0 +1,40 @@ +{ + "meta": { + "totalCount": 0 + }, + "data": [ + { + "type": "linkedworkitems", + "id": "MyProjectId/MyWorkItemId/parent/MyProjectId/MyLinkedWorkItemId", + "revision": "1234", + "attributes": { + "revision": "1234", + "role": "relates_to", + "suspect": true + }, + "relationships": { + "workItem": { + "data": { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId2", + "revision": "1234" + } + } + }, + "meta": {}, + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId/MyLinkedWorkItemId?revision=1234" + } + } + ], + "included": [ + {} + ], + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=5", + "first": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=1", + "prev": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=4", + "next": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=6", + "last": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=9" + } +} diff --git a/tests/data/mock_api_responses/get_linked_work_items_next_page.json.license b/tests/data/mock_api_responses/get_linked_work_items_next_page.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/get_linked_work_items_next_page.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/get_linked_work_items_no_next_page.json b/tests/data/mock_api_responses/get_linked_work_items_no_next_page.json new file mode 100644 index 00000000..2a6ab798 --- /dev/null +++ b/tests/data/mock_api_responses/get_linked_work_items_no_next_page.json @@ -0,0 +1,39 @@ +{ + "meta": { + "totalCount": 0 + }, + "data": [ + { + "type": "linkedworkitems", + "id": "MyProjectId/MyWorkItemId/relates_to/MyProjectId/MyWorkItemId2", + "revision": "1234", + "attributes": { + "revision": "1234", + "role": "relates_to", + "suspect": true + }, + "relationships": { + "workItem": { + "data": { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId2", + "revision": "1234" + } + } + }, + "meta": {}, + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId/MyLinkedWorkItemId?revision=1234" + } + } + ], + "included": [ + {} + ], + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=5", + "first": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=1", + "prev": 
"server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=4", + "last": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems/parent/MyProjectId?page%5Bsize%5D=10&page%5Bnumber%5D=9" + } +} diff --git a/tests/data/mock_api_responses/get_linked_work_items_no_next_page.json.license b/tests/data/mock_api_responses/get_linked_work_items_no_next_page.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/get_linked_work_items_no_next_page.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/project.json b/tests/data/mock_api_responses/project.json new file mode 100644 index 00000000..913ea61e --- /dev/null +++ b/tests/data/mock_api_responses/project.json @@ -0,0 +1,53 @@ +{ + "data": { + "type": "projects", + "id": "MyProjectId", + "revision": "1234", + "attributes": { + "active": true, + "color": "Color", + "description": { + "type": "text/plain", + "value": "My text value" + }, + "finish": "1970-01-01", + "icon": "Icon", + "id": "MyProjectId", + "lockWorkRecordsDate": "1970-01-01", + "name": "Name", + "start": "1970-01-01", + "trackerPrefix": "Tracker Prefix" + }, + "relationships": { + "lead": { + "data": { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + } + }, + "meta": { + "errors": [ + { + "status": "400", + "title": "Bad Request", + "detail": "Unexpected token, BEGIN_ARRAY expected, but was : BEGIN_OBJECT (at $.data)", + "source": { + "pointer": "$.data", + "parameter": "revision" + } + } + ] + }, + "links": { + "self": "server-host-name/application-path/projects/MyProjectId?revision=1234" + } + }, + "included": [ + {} + ], + "links": { + "self": "server-host-name/application-path/projects/MyProjectId?revision=1234" + } +} diff --git a/tests/data/mock_api_responses/project.json.license b/tests/data/mock_api_responses/project.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/project.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/workitems_next_page.json b/tests/data/mock_api_responses/workitems_next_page.json new file mode 100644 index 00000000..3c5e15f2 --- /dev/null +++ b/tests/data/mock_api_responses/workitems_next_page.json @@ -0,0 +1,174 @@ +{ + "meta": { + "totalCount": 0 + }, + "data": [ + { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId", + "revision": "1234", + "attributes": { + "created": "1970-01-01T00:00:00Z", + "description": { + "type": "text/html", + "value": "My text value" + }, + "dueDate": "1970-01-01", + "hyperlinks": [ + { + "role": "ref_ext", + "uri": "https://polarion.plm.automation.siemens.com" + } + ], + "id": "MyWorkItemId", + "initialEstimate": "5 1/2d", + "outlineNumber": "1.11", + "plannedEnd": "1970-01-01T00:00:00Z", + "plannedStart": "1970-01-01T00:00:00Z", + "priority": "90.0", + "remainingEstimate": "5 1/2d", + "resolution": "done", + "resolvedOn": "1970-01-01T00:00:00Z", + "severity": "blocker", + "status": "open", + "timeSpent": "5 1/2d", + "title": "Title", + "type": "task", + "updated": "1970-01-01T00:00:00Z", + "capella_uuid": "asdfg" + }, + "relationships": { + "assignee": { + "data": [ + { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } 
+ }, + "attachments": { + "data": [ + { + "type": "workitem_attachments", + "id": "MyProjectId/MyWorkItemId/MyAttachmentId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/attachments?revision=1234" + } + }, + "author": { + "data": { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + }, + "categories": { + "data": [ + { + "type": "categories", + "id": "MyProjectId/MyCategoryId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "comments": { + "data": [ + { + "type": "workitem_comments", + "id": "MyProjectId/MyWorkItemId/MyCommentId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/comments?revision=1234" + } + }, + "linkedWorkItems": { + "data": [ + { + "type": "linkedworkitems", + "id": "MyProjectId/MyWorkItemId/parent/MyProjectId/MyLinkedWorkItemId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems?revision=1234" + } + }, + "module": { + "data": { + "type": "documents", + "id": "MyProjectId/MySpaceId/MyDocumentId", + "revision": "1234" + } + }, + "plannedIn": { + "data": [ + { + "type": "plans", + "id": "MyProjectId/MyPlanId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "project": { + "data": { + "type": "projects", + "id": "MyProjectId", + "revision": "1234" + } + }, + "watches": { + "data": [ + { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + } + }, + "meta": {}, + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId?revision=1234", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitem?id=MyWorkItemId&revision=1234" + } + } + ], + "included": [ + {} + ], + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=5", + "first": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=1", + "prev": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=4", + "next": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=6", + "last": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=9", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitems" + } +} diff --git a/tests/data/mock_api_responses/workitems_next_page.json.license b/tests/data/mock_api_responses/workitems_next_page.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/workitems_next_page.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/workitems_next_page_error.json b/tests/data/mock_api_responses/workitems_next_page_error.json new file mode 100644 index 00000000..65114a5b --- /dev/null +++ b/tests/data/mock_api_responses/workitems_next_page_error.json @@ -0,0 +1,186 @@ +{ + "meta": { + "totalCount": 0 + }, + "data": [ + { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId", + "revision": "1234", + 
"attributes": { + "created": "1970-01-01T00:00:00Z", + "description": { + "type": "text/html", + "value": "My text value" + }, + "dueDate": "1970-01-01", + "hyperlinks": [ + { + "role": "ref_ext", + "uri": "https://polarion.plm.automation.siemens.com" + } + ], + "id": "MyWorkItemId", + "initialEstimate": "5 1/2d", + "outlineNumber": "1.11", + "plannedEnd": "1970-01-01T00:00:00Z", + "plannedStart": "1970-01-01T00:00:00Z", + "priority": "90.0", + "remainingEstimate": "5 1/2d", + "resolution": "done", + "resolvedOn": "1970-01-01T00:00:00Z", + "severity": "blocker", + "status": "open", + "timeSpent": "5 1/2d", + "title": "Title", + "type": "task", + "updated": "1970-01-01T00:00:00Z", + "capella_uuid": "asdfg" + }, + "relationships": { + "assignee": { + "data": [ + { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "attachments": { + "data": [ + { + "type": "workitem_attachments", + "id": "MyProjectId/MyWorkItemId/MyAttachmentId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/attachments?revision=1234" + } + }, + "author": { + "data": { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + }, + "categories": { + "data": [ + { + "type": "categories", + "id": "MyProjectId/MyCategoryId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "comments": { + "data": [ + { + "type": "workitem_comments", + "id": "MyProjectId/MyWorkItemId/MyCommentId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/comments?revision=1234" + } + }, + "linkedWorkItems": { + "data": [ + { + "type": "linkedworkitems", + "id": "MyProjectId/MyWorkItemId/parent/MyProjectId/MyLinkedWorkItemId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems?revision=1234" + } + }, + "module": { + "data": { + "type": "documents", + "id": "MyProjectId/MySpaceId/MyDocumentId", + "revision": "1234" + } + }, + "plannedIn": { + "data": [ + { + "type": "plans", + "id": "MyProjectId/MyPlanId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "project": { + "data": { + "type": "projects", + "id": "MyProjectId", + "revision": "1234" + } + }, + "watches": { + "data": [ + { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + } + }, + "meta": { + "errors": [ + { + "status": "400", + "title": "Bad Request", + "detail": "Unexpected token, BEGIN_ARRAY expected, but was : BEGIN_OBJECT (at $.data)", + "source": { + "pointer": "$.data", + "parameter": "revision" + } + } + ] + }, + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId?revision=1234", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitem?id=MyWorkItemId&revision=1234" + } + } + ], + "included": [ + {} + ], + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=5", + "first": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=1", + "prev": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=4", + "next": 
"server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=6", + "last": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=9", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitems" + } +} diff --git a/tests/data/mock_api_responses/workitems_next_page_error.json.license b/tests/data/mock_api_responses/workitems_next_page_error.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/workitems_next_page_error.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/mock_api_responses/workitems_no_next_page.json b/tests/data/mock_api_responses/workitems_no_next_page.json new file mode 100644 index 00000000..a2656b1d --- /dev/null +++ b/tests/data/mock_api_responses/workitems_no_next_page.json @@ -0,0 +1,173 @@ +{ + "meta": { + "totalCount": 0 + }, + "data": [ + { + "type": "workitems", + "id": "MyProjectId/MyWorkItemId2", + "revision": "1234", + "attributes": { + "created": "1970-01-01T00:00:00Z", + "description": { + "type": "text/html", + "value": "My text value" + }, + "dueDate": "1970-01-01", + "hyperlinks": [ + { + "role": "ref_ext", + "uri": "https://polarion.plm.automation.siemens.com" + } + ], + "id": "MyWorkItemId", + "initialEstimate": "5 1/2d", + "outlineNumber": "1.11", + "plannedEnd": "1970-01-01T00:00:00Z", + "plannedStart": "1970-01-01T00:00:00Z", + "priority": "90.0", + "remainingEstimate": "5 1/2d", + "resolution": "done", + "resolvedOn": "1970-01-01T00:00:00Z", + "severity": "blocker", + "status": "open", + "timeSpent": "5 1/2d", + "title": "Title", + "type": "task", + "updated": "1970-01-01T00:00:00Z", + "capella_uuid": "asdfgh" + }, + "relationships": { + "assignee": { + "data": [ + { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "attachments": { + "data": [ + { + "type": "workitem_attachments", + "id": "MyProjectId/MyWorkItemId/MyAttachmentId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/attachments?revision=1234" + } + }, + "author": { + "data": { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + }, + "categories": { + "data": [ + { + "type": "categories", + "id": "MyProjectId/MyCategoryId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "comments": { + "data": [ + { + "type": "workitem_comments", + "id": "MyProjectId/MyWorkItemId/MyCommentId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/comments?revision=1234" + } + }, + "linkedWorkItems": { + "data": [ + { + "type": "linkedworkitems", + "id": "MyProjectId/MyWorkItemId/parent/MyProjectId/MyLinkedWorkItemId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + }, + "links": { + "related": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId/linkedworkitems?revision=1234" + } + }, + "module": { + "data": { + "type": "documents", + "id": "MyProjectId/MySpaceId/MyDocumentId", + "revision": "1234" + } + }, + "plannedIn": { + "data": [ + { + "type": "plans", + "id": "MyProjectId/MyPlanId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + }, + "project": { + "data": { + 
"type": "projects", + "id": "MyProjectId", + "revision": "1234" + } + }, + "watches": { + "data": [ + { + "type": "users", + "id": "MyUserId", + "revision": "1234" + } + ], + "meta": { + "totalCount": 0 + } + } + }, + "meta": {}, + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems/MyWorkItemId?revision=1234", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitem?id=MyWorkItemId&revision=1234" + } + } + ], + "included": [ + {} + ], + "links": { + "self": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=5", + "first": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=1", + "prev": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=4", + "last": "server-host-name/application-path/projects/MyProjectId/workitems?page%5Bsize%5D=10&page%5Bnumber%5D=9", + "portal": "server-host-name/application-path/polarion/redirect/project/MyProjectId/workitems" + } +} diff --git a/tests/data/mock_api_responses/workitems_no_next_page.json.license b/tests/data/mock_api_responses/workitems_no_next_page.json.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/mock_api_responses/workitems_no_next_page.json.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/model/.project b/tests/data/model/.project new file mode 100644 index 00000000..746ec363 --- /dev/null +++ b/tests/data/model/.project @@ -0,0 +1,11 @@ + + + model + + + + + + + + diff --git a/tests/data/model/.project.license b/tests/data/model/.project.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/model/.project.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/model/Melody Model Test.afm b/tests/data/model/Melody Model Test.afm new file mode 100644 index 00000000..02f4486c --- /dev/null +++ b/tests/data/model/Melody Model Test.afm @@ -0,0 +1,6 @@ + + + + + + diff --git a/tests/data/model/Melody Model Test.afm.license b/tests/data/model/Melody Model Test.afm.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/model/Melody Model Test.afm.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/model/Melody Model Test.aird b/tests/data/model/Melody Model Test.aird new file mode 100644 index 00000000..513cb8f7 --- /dev/null +++ b/tests/data/model/Melody Model Test.aird @@ -0,0 +1,13208 @@ + + + + Melody%20Model%20Test.afm + Melody%20Model%20Test.capella + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + +
[Melody Model Test.aird diff body omitted: 13,208 lines of Sirius/Capella diagram XML whose markup was stripped during extraction, leaving only scattered layout-style tokens (KEEP_LOCATION, KEEP_SIZE, KEEP_RATIO, routingStyle, …) with no recoverable structure.]
+ + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + backgroundColor + + + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + + + + + + + + + + + + + + + lineStyle + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + + + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + KEEP_LOCATION + KEEP_SIZE + KEEP_RATIO + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + diff --git a/tests/data/model/Melody Model Test.aird.license b/tests/data/model/Melody Model Test.aird.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/model/Melody Model Test.aird.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0 diff --git a/tests/data/model/Melody Model Test.capella b/tests/data/model/Melody Model Test.capella new file mode 100644 index 00000000..5c86a526 --- /dev/null +++ b/tests/data/model/Melody Model Test.capella @@ -0,0 +1,3227 @@ + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + + A test spec. 
diff --git a/tests/data/model/Melody Model Test.capella.license b/tests/data/model/Melody Model Test.capella.license new file mode 100644 index 00000000..c3fb0221 --- /dev/null +++ b/tests/data/model/Melody Model Test.capella.license @@ -0,0 +1,2 @@ +Copyright DB Netz AG and contributors +SPDX-License-Identifier: Apache-2.0
diff --git a/tests/data/model_elements/config.yaml b/tests/data/model_elements/config.yaml new file mode 100644 index 00000000..e8647854 --- /dev/null +++ b/tests/data/model_elements/config.yaml @@ -0,0 +1,43 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +"*": # All layers + "*": # All class types + - parent # Specify workitem links + - description_reference # Custom attribute + Class: + - state_machines + +oa: # Specify below + - OperationalCapability: # Capella Type with references + - involved_activities # Specify workitem links + - involved_entities + + - OperationalActivity # Capella Type w/o references + - OperationalEntity # Custom Type maps to Entity + 
- OperationalInteraction # Custom Type maps to FunctionalExchange + - CommunicationMean + - Class + - StateMachine + - Constraint + +sa: + - SystemComponent: + - allocated_functions + - SystemActor: # Custom Type that doesn't exist in Capella + - allocated_functions + - SystemFunction + - ComponentExchange: + - allocated_functional_exchanges + - ComponentPort + - FunctionalExchange: + - exchanged_items + - ExchangeItem + - Class + - Constraint + +pa: + - PhysicalComponent: + - allocated_functions + - PhysicalActor: + - allocated_functions diff --git a/tests/test_api_client.py b/tests/test_api_client.py new file mode 100644 index 00000000..bb331fe8 --- /dev/null +++ b/tests/test_api_client.py @@ -0,0 +1,580 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +import json +import pathlib + +import pytest +import pytest_httpx + +from capella2polarion import polarion_api +from capella2polarion.polarion_api import client as polarion_client + +TEST_DATA_ROOT = pathlib.Path(__file__).parent / "data" +TEST_RESPONSES = TEST_DATA_ROOT / "mock_api_responses" +TEST_REQUESTS = TEST_DATA_ROOT / "expected_requests" + + +@pytest.fixture() +def client(): + yield polarion_client.OpenAPIPolarionProjectClient( + "PROJ", "capella_uuid", False, "http://127.0.0.1/api", "PAT123" + ) + + +def test_api_authentication( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "project.json") as f: + httpx_mock.add_response( + match_headers={"Authorization": "Bearer PAT123"}, + json=json.load(f), + ) + client.project_exists() + + +def test_check_existing_project( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "project.json") as f: + httpx_mock.add_response(json=json.load(f)) + assert client.project_exists() + + +def test_check_non_existing_project( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(status_code=404) + assert not client.project_exists() + + +def test_get_all_work_items_multi_page( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "workitems_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + with open(TEST_RESPONSES / "workitems_no_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + + work_items = client.get_all_work_items( + "", + {"fields[workitems]": f"id"}, + ) + query = { + "fields[workitems]": "id", + "page[size]": "100", + "page[number]": "1", + "query": "", + } + reqs = httpx_mock.get_requests() + + assert reqs[0].method == "GET" + assert dict(reqs[0].url.params) == query + assert reqs[1].method == "GET" + query["page[number]"] = "2" + assert dict(reqs[1].url.params) == query + assert len(work_items) == 2 + assert len(reqs) == 2 + + +def test_get_all_work_items_single_page( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "workitems_no_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + + work_items = client.get_all_work_items("") + reqs = httpx_mock.get_requests() + assert reqs[0].method == "GET" + assert len(work_items) == 1 + assert len(reqs) == 1 + assert work_items[0] == polarion_api.WorkItem( + "MyWorkItemId2", + "Title", + "text/html", + "My text value", + "task", + "asdfgh", + "open", + {"capella_uuid": "asdfgh"}, + ) + + +def 
test_get_all_work_items_faulty_item( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "workitems_next_page_error.json") as f: + httpx_mock.add_response(json=json.load(f)) + + with open(TEST_RESPONSES / "workitems_no_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + + work_items = client.get_all_work_items("") + reqs = httpx_mock.get_requests() + assert reqs[0].method == "GET" + assert len(work_items) == 1 + assert len(reqs) == 2 + + +def test_create_work_item( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "created_work_items.json") as f: + httpx_mock.add_response(201, json=json.load(f)) + work_item = polarion_api.WorkItem( + title="Title", + description_type="text/html", + description="My text value", + status="open", + type="task", + uuid_capella="asdfg", + ) + + client.create_work_item(work_item) + + req = httpx_mock.get_request() + assert req.method == "POST" + with open(TEST_REQUESTS / "post_workitem.json") as f: + expected = json.load(f) + + assert json.loads(req.content.decode()) == expected + + +def test_create_work_items_successfully( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "created_work_items.json") as f: + httpx_mock.add_response(201, json=json.load(f)) + work_item = polarion_api.WorkItem( + title="Title", + description_type="text/html", + description="My text value", + status="open", + type="task", + uuid_capella="asdfg", + ) + + client.create_work_items(3 * [work_item]) + + req = httpx_mock.get_request() + + assert req.method == "POST" + with open(TEST_REQUESTS / "post_workitems.json") as f: + expected = json.load(f) + + assert json.loads(req.content.decode()) == expected + + +def test_create_work_items_failed( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "error.json") as f: + httpx_mock.add_response(400, json=json.load(f)) + work_item = polarion_api.WorkItem( + title="Title", + description_type="text/html", + description="My text value", + status="open", + type="task", + uuid_capella="asdfg", + ) + with pytest.raises(polarion_api.PolarionApiException) as exc_info: + client.create_work_items(3 * [work_item]) + + assert exc_info.type is polarion_api.PolarionApiException + assert exc_info.value.args[0][0] == "400" + assert ( + exc_info.value.args[0][1] + == "Unexpected token, BEGIN_ARRAY expected, but was : BEGIN_OBJECT (at $.data)" + ) + + +def test_create_work_items_failed_no_error( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(501, content=b"asdfg") + + work_item = polarion_api.WorkItem( + title="Title", + description_type="text/html", + description="My text value", + status="open", + type="task", + uuid_capella="asdfg", + ) + with pytest.raises(polarion_api.PolarionApiBaseException) as exc_info: + client.create_work_items(3 * [work_item]) + + assert exc_info.type is polarion_api.PolarionApiUnexpectedException + assert exc_info.value.args[0] == 501 + assert exc_info.value.args[1] == b"asdfg" + + +def test_update_work_item_completely( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.update_work_item( + polarion_client.WorkItem( + id="MyWorkItemId", + 
description_type="text/html", + description="My text value", + title="Title", + status="open", + uuid_capella="qwertz", + ) + ) + + req = httpx_mock.get_request() + + assert req.url.path.endswith("PROJ/workitems/MyWorkItemId") + assert req.method == "PATCH" + with open(TEST_REQUESTS / "patch_work_item_completely.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_update_work_item_description( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.update_work_item( + polarion_client.WorkItem( + id="MyWorkItemId", + description_type="text/html", + description="My text value", + ) + ) + + req = httpx_mock.get_request() + + assert req.url.path.endswith("PROJ/workitems/MyWorkItemId") + assert req.method == "PATCH" + with open(TEST_REQUESTS / "patch_work_item_description.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_update_work_item_title( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.update_work_item( + polarion_client.WorkItem( + id="MyWorkItemId", + title="Title", + ) + ) + + req = httpx_mock.get_request() + + assert req.url.path.endswith("PROJ/workitems/MyWorkItemId") + assert req.method == "PATCH" + with open(TEST_REQUESTS / "patch_work_item_title.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_update_work_item_status( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.update_work_item( + polarion_client.WorkItem( + id="MyWorkItemId", + status="open", + ) + ) + + req = httpx_mock.get_request() + + assert req.url.path.endswith("PROJ/workitems/MyWorkItemId") + assert req.method == "PATCH" + with open(TEST_REQUESTS / "patch_work_item_status.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_delete_work_item_status_mode( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.delete_work_item("MyWorkItemId") + + req = httpx_mock.get_request() + + assert req.method == "PATCH" + with open(TEST_REQUESTS / "patch_work_item_status_deleted.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_delete_work_item_delete_mode( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.delete_polarion_work_items = True + + client.delete_work_item("MyWorkItemId") + + req = httpx_mock.get_request() + + assert req.method == "DELETE" + with open(TEST_REQUESTS / "delete_work_item.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_get_work_item_links_single_page( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "get_linked_work_items_no_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + + work_item_links = client.get_all_work_item_links( + "MyWorkItemId", include="workitem" + ) + query = { + "fields[linkedworkitems]": "id,role,suspect", + "page[size]": "100", + "page[number]": "1", + "include": "workitem", + } + + reqs = httpx_mock.get_requests() + + assert reqs[0].method == "GET" + assert dict(reqs[0].url.params) == query + assert len(work_item_links) == 1 + assert len(reqs) == 1 + 
assert work_item_links[0] == polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId2", "relates_to", True, "MyProjectId" + ) + + +def test_get_work_item_links_multi_page( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "get_linked_work_items_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + with open(TEST_RESPONSES / "get_linked_work_items_no_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + + work_items = client.get_all_work_item_links("MyWorkItemId") + query = { + "fields[linkedworkitems]": "id,role,suspect", + "page[size]": "100", + "page[number]": "1", + } + reqs = httpx_mock.get_requests() + + assert reqs[0].method == "GET" + assert dict(reqs[0].url.params) == query + assert reqs[1].method == "GET" + query["page[number]"] = "2" + assert dict(reqs[1].url.params) == query + assert len(work_items) == 2 + assert len(reqs) == 2 + + +def test_delete_work_item_link( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.delete_work_item_link( + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId2", "parent", True, "MyProjectId" + ) + ) + + req = httpx_mock.get_request() + + assert req.method == "DELETE" + with open(TEST_REQUESTS / "delete_work_item_link.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_delete_work_item_links( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.delete_work_item_links( + [ + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId2", "parent", True, "MyProjectId" + ), + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId3", "parent", True + ), + ] + ) + + req = httpx_mock.get_request() + + assert req.method == "DELETE" + with open(TEST_REQUESTS / "delete_work_item_links.json") as f: + assert json.loads(req.content.decode()) == json.load(f) + + +def test_delete_work_item_links_multi_primary( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + httpx_mock.add_response(204) + + client.delete_work_item_links( + [ + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId2", "parent", True, "MyProjectId" + ), + polarion_api.WorkItemLink( + "MyWorkItemId2", "MyWorkItemId3", "parent", True + ), + ] + ) + + reqs = httpx_mock.get_requests() + + assert len(reqs) == 2 + assert reqs[0].method == "DELETE" + assert reqs[1].method == "DELETE" + with open(TEST_REQUESTS / "delete_work_item_link.json") as f: + assert json.loads(reqs[0].content.decode()) == json.load(f) + with open(TEST_REQUESTS / "delete_work_item_link_2.json") as f: + assert json.loads(reqs[1].content.decode()) == json.load(f) + + +def test_create_work_item_link( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "created_work_item_links.json") as f: + httpx_mock.add_response(201, json=json.load(f)) + + client.create_work_item_link( + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId2", "relates_to", True + ) + ) + + req = httpx_mock.get_request() + + assert req.method == "POST" + assert req.url.path.endswith("PROJ/workitems/MyWorkItemId/linkedworkitems") + with open(TEST_REQUESTS / "post_work_item_link.json") as f: + expected = json.load(f) + + assert json.loads(req.content.decode()) == expected + + +def 
test_create_work_item_links_different_primaries( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "created_work_item_links.json") as f: + content = json.load(f) + + httpx_mock.add_response(201, json=content) + httpx_mock.add_response(201, json=content) + + client.create_work_item_links( + [ + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId2", "relates_to", True + ), + polarion_api.WorkItemLink( + "MyWorkItemId3", "MyWorkItemId2", "relates_to", True + ), + ] + ) + + reqs = httpx_mock.get_requests() + + assert len(reqs) == 2 + assert reqs[0].method == "POST" + assert reqs[1].method == "POST" + + assert reqs[0].url.path.endswith( + "PROJ/workitems/MyWorkItemId/linkedworkitems" + ) + assert reqs[1].url.path.endswith( + "PROJ/workitems/MyWorkItemId3/linkedworkitems" + ) + + with open(TEST_REQUESTS / "post_work_item_link.json") as f: + expected = json.load(f) + + assert json.loads(reqs[0].content.decode()) == expected + assert json.loads(reqs[1].content.decode()) == expected + + +def test_create_work_item_links_same_primaries( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "created_work_item_links.json") as f: + httpx_mock.add_response(201, json=json.load(f)) + + client.create_work_item_links( + [ + polarion_api.WorkItemLink( + "MyWorkItemId", + "MyWorkItemId2", + "relates_to", + True, + "MyProjectId", + ), + polarion_api.WorkItemLink( + "MyWorkItemId", "MyWorkItemId3", "parent", False + ), + ] + ) + + req = httpx_mock.get_request() + + assert req.method == "POST" + with open(TEST_REQUESTS / "post_work_item_links.json") as f: + expected = json.load(f) + + assert json.loads(req.content.decode()) == expected + + +def test_get_work_item_element_mapping( + client: polarion_client.OpenAPIPolarionProjectClient, + httpx_mock: pytest_httpx.HTTPXMock, +): + with open(TEST_RESPONSES / "workitems_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + with open(TEST_RESPONSES / "workitems_no_next_page.json") as f: + httpx_mock.add_response(json=json.load(f)) + + work_item_mapping = client.get_work_item_element_mapping(["task"]) + + reqs = httpx_mock.get_requests() + assert len(reqs) == 2 + assert work_item_mapping == { + "asdfg": "MyWorkItemId", + "asdfgh": "MyWorkItemId2", + } diff --git a/tests/test_cli.py b/tests/test_cli.py new file mode 100644 index 00000000..4f1a160b --- /dev/null +++ b/tests/test_cli.py @@ -0,0 +1,115 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import collections.abc as cabc +import os +import pathlib +import typing as t +from unittest import mock + +import pytest +from click import testing + +import capella2polarion.__main__ as main +from capella2polarion import elements, polarion_api + +# pylint: disable-next=relative-beyond-top-level, useless-suppression +from .conftest import ( # type: ignore[import] + TEST_DIAGRAM_CACHE, + TEST_HOST, + TEST_MODEL, + TEST_MODEL_ELEMENTS_CONFIG, +) + +ELEMENTS_IDX_PATH = pathlib.Path("elements_index.yaml") + + +def prepare_cli_test( + monkeypatch: pytest.MonkeyPatch, return_value: t.Any | cabc.Iterable[t.Any] +) -> mock.MagicMock: + os.environ["POLARION_HOST"] = TEST_HOST + os.environ["POLARION_PAT"] = "1234" + mock_api = mock.MagicMock(spec=polarion_api.OpenAPIPolarionProjectClient) + monkeypatch.setattr(polarion_api, "OpenAPIPolarionProjectClient", mock_api) + mock_get_polarion_id_map = 
mock.MagicMock() + monkeypatch.setattr(main, "get_polarion_id_map", mock_get_polarion_id_map) + if isinstance(return_value, cabc.Iterable) and not isinstance( + return_value, str + ): + id_map_attr = "side_effect" + else: + id_map_attr = "return_value" + + setattr(mock_get_polarion_id_map, id_map_attr, return_value) + return mock_get_polarion_id_map + + +def test_migrate_diagrams(monkeypatch: pytest.MonkeyPatch): + mock_get_polarion_id_map = prepare_cli_test( + monkeypatch, {"uuid1": "project/W-1", "uuid2": "project/W-2"} + ) + mock_delete_work_items = mock.MagicMock() + monkeypatch.setattr(elements, "delete_work_items", mock_delete_work_items) + mock_update_diagrams = mock.MagicMock() + monkeypatch.setattr( + elements.diagram, "update_diagrams", mock_update_diagrams + ) + mock_create_diagrams = mock.MagicMock() + monkeypatch.setattr( + elements.diagram, "create_diagrams", mock_create_diagrams + ) + command = ["--project-id=project_id", "diagrams", str(TEST_DIAGRAM_CACHE)] + + result = testing.CliRunner().invoke(main.cli, command) + + assert result.exit_code == 0 + assert mock_get_polarion_id_map.call_count == 1 + assert mock_delete_work_items.call_count == 1 + assert mock_update_diagrams.call_count == 1 + assert mock_create_diagrams.call_count == 1 + + +def test_migrate_model_elements(monkeypatch: pytest.MonkeyPatch): + mock_get_polarion_id_map = prepare_cli_test( + monkeypatch, + ( + { + "5b1f761c-3fd3-4f26-bbc5-1b06a6f7b434": "project/W-0", + "uuid1": "project/W-1", + "uuid2": "project/W-2", + }, + {"uuid2": "project/W-2", "uuid3": "project/W-3"}, + {}, + ), + ) + mock_delete_work_items = mock.MagicMock() + monkeypatch.setattr(elements, "delete_work_items", mock_delete_work_items) + mock_update_work_items = mock.MagicMock() + monkeypatch.setattr( + elements.element, "update_work_items", mock_update_work_items + ) + mock_create_work_items = mock.MagicMock() + monkeypatch.setattr( + elements.element, "create_work_items", mock_create_work_items + ) + mock_update_links = mock.MagicMock() + monkeypatch.setattr(elements.element, "update_links", mock_update_links) + + command = [ + "--project-id=project_id", + "model-elements", + str(TEST_MODEL), + str(TEST_MODEL_ELEMENTS_CONFIG), + ] + + result = testing.CliRunner().invoke(main.cli, command) + + assert result.exit_code == 0 + assert mock_get_polarion_id_map.call_count == 3 + assert mock_delete_work_items.call_count == 1 + assert mock_update_work_items.call_count == 1 + assert mock_create_work_items.call_count == 1 + assert mock_update_links.call_count == 2 + assert ELEMENTS_IDX_PATH.exists() diff --git a/tests/test_elements.py b/tests/test_elements.py new file mode 100644 index 00000000..ed54f88e --- /dev/null +++ b/tests/test_elements.py @@ -0,0 +1,401 @@ +# Copyright DB Netz AG and contributors +# SPDX-License-Identifier: Apache-2.0 + +from __future__ import annotations + +import typing as t +from unittest import mock + +import capellambse +import markupsafe +import pytest + +from capella2polarion import elements, polarion_api +from capella2polarion.elements import diagram, element, helpers, serialize + +# pylint: disable-next=relative-beyond-top-level, useless-suppression +from .conftest import TEST_DIAGRAM_CACHE, TEST_HOST # type: ignore[import] + +# pylint: disable=redefined-outer-name +TEST_DIAG_UUID = "_6Td1kOQ8Ee2tXvmHzHzXCA" +TEST_ELEMENT_UUID = "0d2edb8f-fa34-4e73-89ec-fb9a63001440" +TEST_OCAP_UUID = "83d1334f-6180-46c4-a80d-6839341df688" +TEST_DESCR = ( + "

<p>This instance is the mighty Hogwarts. Do you really need a " + "description? Then maybe read the books or watch atleast the epic movies." + "</p>
\n" +) +TEST_WE_UUID = "e37510b9-3166-4f80-a919-dfaac9b696c7" +TEST_E_UUID = "4bf0356c-89dd-45e9-b8a6-e0332c026d33" +TEST_WE_DESCR = ( + '

\n' +) +TEST_ACTOR_UUID = "08e02248-504d-4ed8-a295-c7682a614f66" +TEST_PHYS_COMP = "b9f9a83c-fb02-44f7-9123-9d86326de5f1" +TEST_PHYS_NODE = "8a6d68c8-ac3d-4654-a07e-ada7adeed09f" +TEST_POL_ID_MAP = {TEST_E_UUID: "TEST"} +TEST_POL_TYPE_MAP = { + TEST_ELEMENT_UUID: "LogicalComponent", + TEST_OCAP_UUID: "OperationalCapability", + TEST_WE_UUID: "Entity", +} +TEST_SER_DIAGRAM: dict[str, t.Any] = { + "id": None, + "title": "[CDB] Class tests", + "description_type": "text/html", + "type": "diagram", + "uuid_capella": "_Eiw7IOQ9Ee2tXvmHzHzXCA", + "status": None, + "additional_attributes": {}, +} +TEST_DIAG_DESCR = ( + '
<html><p><img style="max-width: 100%" ' + 'src="data:image/svg+xml;base64,' +) + + +class FakeModelObject: + """Mimicks a capellambse model object.""" + + def __init__( + self, + uuid: str, + name: str = "", + attribute: t.Any | None = None, + ): + self.uuid = uuid + self.name = name + self.attribute = attribute + + def __repr__(self) -> 
str: + return f"<{type(self).__name__} {self.name!r} ({self.uuid})>" + + +class UnsupportedFakeModelObject(FakeModelObject): + """A ``FakeModelObject`` which shouldn't be migrated.""" + + +class TestModelElements: + @staticmethod + @pytest.fixture + def context() -> dict[str, t.Any]: + api = mock.MagicMock(spec=polarion_api.OpenAPIPolarionProjectClient) + fake = FakeModelObject("uuid1", name="Fake 1") + return { + "API": api, + "PROJECT_ID": "project_id", + "ELEMENTS": { + "FakeModelObject": [ + fake, + FakeModelObject("uuid2", name="Fake 2", attribute=fake), + ], + "UnsupportedFakeModelObject": [ + UnsupportedFakeModelObject("uuid3") + ], + }, + "POLARION_ID_MAP": {"uuid1": "Obj-1"}, + "CONFIG": {}, + "ROLES": {"FakeModelObject": ["attribute"]}, + } + + @staticmethod + def test_create_work_items( + monkeypatch: pytest.MonkeyPatch, context: dict[str, t.Any] + ): + monkeypatch.setattr( + serialize, + "generic_attributes", + mock_generic_attributes := mock.MagicMock(), + ) + mock_generic_attributes.side_effect = [ + wi_ := { + "uuid_capella": "uuid1", + "title": "Fake 1", + "type": "fakeModelObject", + "description_type": "text/html", + "description": markupsafe.Markup(""), + }, + wi_1 := { + "uuid_capella": "uuid2", + "title": "Fake 2", + "type": "fakeModelObject", + "description_type": "text/html", + "description": markupsafe.Markup(""), + }, + ] + + element.create_work_items(context) + + wi, wi1 = context["API"].create_work_items.call_args[0][0] + assert context["API"].create_work_items.call_count == 1 + assert wi == polarion_api.WorkItem(**wi_) # type: ignore[arg-type] + assert wi1 == polarion_api.WorkItem(**wi_1) # type: ignore[arg-type] + + @staticmethod + def test_update_work_items( + monkeypatch: pytest.MonkeyPatch, context: dict[str, t.Any] + ): + monkeypatch.setattr( + serialize, + "generic_attributes", + mock_generic_attributes := mock.MagicMock(), + ) + mock_generic_attributes.return_value = { + "type": "type", + "uuid_capella": "uuid1", + "title": "Something", + "description_type": "text/html", + "description": (expected_markup := markupsafe.Markup("Test")), + } + + element.update_work_items(context) + + work_item = context["API"].update_work_item.call_args[0][0] + assert context["API"].update_work_item.call_count == 1 + assert isinstance(work_item, polarion_api.WorkItem) + assert work_item.id == "Obj-1" + assert work_item.title == "Something" + assert work_item.description_type == "text/html" + assert work_item.description == expected_markup + assert work_item.type is None + assert work_item.uuid_capella is None + assert work_item.status == "open" + + @staticmethod + def test_update_links_with_no_elements(context: dict[str, t.Any]): + context["POLARION_ID_MAP"] = {} + + element.update_links(context) + + assert context["API"].get_all_work_item_links.call_count == 0 + + @staticmethod + def test_update_links(context: dict[str, t.Any]): + context["POLARION_ID_MAP"]["uuid2"] = "Obj-2" + context["API"].get_all_work_item_links.return_value = [ + link := polarion_api.WorkItemLink( + "Obj-1", "Obj-2", "attribute", True, "project_id" + ) + ] + expected_new_link = polarion_api.WorkItemLink( + "Obj-2", "Obj-1", "attribute", None, "project_id" + ) + + element.update_links(context) + + links = context["API"].get_all_work_item_links.call_args_list + assert context["API"].get_all_work_item_links.call_count == 2 + assert [links[0][0][0], links[1][0][0]] == ["Obj-1", "Obj-2"] + new_links = context["API"].create_work_item_links.call_args[0][0] + assert 
context["API"].create_work_item_links.call_count == 1 + assert new_links == [expected_new_link] + assert context["API"].delete_work_item_links.call_count == 1 + assert context["API"].delete_work_item_links.call_args[0][0] == [link] + + +class TestHelpers: + @staticmethod + def test_resolve_element_type(): + xtype = "LogicalComponent" + + type = helpers.resolve_element_type(xtype) + + assert type == "logicalComponent" + + +class TestSerializers: + @staticmethod + def test_diagram(): + diag = {"uuid": TEST_DIAG_UUID, "name": "test_diagram"} + + serialized_diagram = serialize.diagram( + diag, {"DIAGRAM_CACHE": TEST_DIAGRAM_CACHE} + ) + del serialized_diagram["description"] + + assert serialized_diagram == { + "type": "diagram", + "uuid_capella": TEST_DIAG_UUID, + "title": "test_diagram", + "description_type": "text/html", + } + + @staticmethod + def test__decode_diagram(): + diagram_path = TEST_DIAGRAM_CACHE / "_6Td1kOQ8Ee2tXvmHzHzXCA.svg" + + diagram = serialize._decode_diagram(diagram_path) + + assert diagram.startswith("data:image/svg+xml;base64,") + + @staticmethod + @pytest.mark.parametrize( + "uuid,expected", + [ + ( + TEST_ELEMENT_UUID, + { + "type": "logicalComponent", + "title": "Hogwarts", + "uuid_capella": TEST_ELEMENT_UUID, + "description_type": "text/html", + "description": markupsafe.Markup(TEST_DESCR), + }, + ), + ( + TEST_OCAP_UUID, + { + "type": "operationalCapability", + "title": "Stay alive", + "uuid_capella": TEST_OCAP_UUID, + "description_type": "text/html", + "description": markupsafe.Markup(""), + "additional_attributes": { + "preCondition": {"type": "text/html", "value": ""}, + "postCondition": {"type": "text/html", "value": ""}, + }, + }, + ), + ( + TEST_WE_UUID, + { + "type": "entity", + "title": "Environment", + "uuid_capella": TEST_WE_UUID, + "description_type": "text/html", + "description": markupsafe.Markup(TEST_WE_DESCR), + }, + ), + ( + TEST_ACTOR_UUID, + { + "type": "logicalActor", + "title": "Prof. A. P. W. B. Dumbledore", + "uuid_capella": TEST_ACTOR_UUID, + "description_type": "text/html", + "description": markupsafe.Markup( + "

<p>Principal of Hogwarts, wearer of the elder wand " + "and greatest mage of all time.</p>
\n" + ), + }, + ), + ( + TEST_PHYS_COMP, + { + "type": "physicalComponent", + "title": "Physical System", + "uuid_capella": TEST_PHYS_COMP, + "description_type": "text/html", + "description": markupsafe.Markup(""), + }, + ), + ( + TEST_PHYS_NODE, + { + "type": "physicalComponentNode", + "title": "PC 1", + "uuid_capella": TEST_PHYS_NODE, + "description_type": "text/html", + "description": markupsafe.Markup(""), + }, + ), + ], + ) + def test_generic_attributes( + model: capellambse.MelodyModel, uuid: str, expected: dict[str, t.Any] + ): + obj = model.by_uuid(uuid) + + attributes = serialize.generic_attributes( + obj, + { + "POLARION_ID_MAP": TEST_POL_ID_MAP, + "POLARION_TYPE_MAP": TEST_POL_TYPE_MAP, + }, + ) + + assert attributes == expected