From 21f89def6a0766d7a0e55109c148e2edd0ef161f Mon Sep 17 00:00:00 2001
From: Johan Karlberg
Date: Mon, 22 Jan 2024 15:44:20 +0100
Subject: [PATCH] apa

---
 .github/workflows/build-docs.yaml             |  14 ++
 .github/workflows/code-qa.yaml                |  36 +++++
 .gitignore                                    |   4 +
 .python-version                               |   1 +
 python/remotivelabs-broker/.ruff.toml         |  79 ++++++++++
 python/remotivelabs-broker/docker-build.sh    |   2 +-
 python/remotivelabs-broker/docker/Dockerfile  |   2 +-
 .../docker/build-all-in-docker.sh             |   6 +-
 .../misc/fix_import_statements.py             |   1 +
 python/remotivelabs-broker/pyproject.toml     |   9 +-
 .../remotivelabs/__init__.py                  |   0
 .../remotivelabs/broker/.gitignore            |   1 +
 .../remotivelabs/broker/__init__.py           |  11 --
 .../broker/generated/sync/.gitignore          |   1 -
 .../remotivelabs/broker/py.typed              |   0
 .../remotivelabs/broker/sync/__init__.py      |  25 ++--
 .../remotivelabs/broker/sync/client.py        |  80 +++++-----
 .../remotivelabs/broker/sync/helper.py        | 138 ++++++------------
 .../remotivelabs/broker/sync/log.py           |   6 +
 .../remotivelabs/broker/sync/signalcreator.py |  47 ++----
 python/remotivelabs-broker/tests/__init__.py  |   3 -
 python/remotivelabs-broker/tests/test_live.py |  11 +-
 .../tests/test_proto_types.py                 |   1 -
 23 files changed, 264 insertions(+), 214 deletions(-)
 create mode 100644 .github/workflows/build-docs.yaml
 create mode 100644 .github/workflows/code-qa.yaml
 create mode 100644 .gitignore
 create mode 100644 .python-version
 create mode 100644 python/remotivelabs-broker/.ruff.toml
 create mode 100644 python/remotivelabs-broker/remotivelabs/__init__.py
 create mode 100644 python/remotivelabs-broker/remotivelabs/broker/.gitignore
 delete mode 100644 python/remotivelabs-broker/remotivelabs/broker/generated/sync/.gitignore
 create mode 100644 python/remotivelabs-broker/remotivelabs/broker/py.typed
 create mode 100644 python/remotivelabs-broker/remotivelabs/broker/sync/log.py
 delete mode 100644 python/remotivelabs-broker/tests/__init__.py

diff --git a/.github/workflows/build-docs.yaml b/.github/workflows/build-docs.yaml
new file mode 100644
index 0000000..602e7f8
--- /dev/null
+++ b/.github/workflows/build-docs.yaml
@@ -0,0 +1,14 @@
+name: Doc builder
+
+on: push
+
+jobs:
+  build-docs:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Build docs
+        run: |
+          cd python/remotivelabs-broker
+          ./docker-build.sh
diff --git a/.github/workflows/code-qa.yaml b/.github/workflows/code-qa.yaml
new file mode 100644
index 0000000..2b356bc
--- /dev/null
+++ b/.github/workflows/code-qa.yaml
@@ -0,0 +1,36 @@
+name: Code-QA
+
+on: push
+
+jobs:
+  mypy:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.8
+          architecture: x64
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Install mypy
+        run: |
+          pip install mypy
+          pip install python/remotivelabs-broker
+      - name: Run mypy
+        run: mypy .
+
+  ruff-lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+
+  ruff-format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+        with:
+          src: "."
+          args: format --check --diff
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..6f65ad8
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,4 @@
+.cache
+.venv
+.local
+.config
diff --git a/.python-version b/.python-version
new file mode 100644
index 0000000..cc1923a
--- /dev/null
+++ b/.python-version
@@ -0,0 +1 @@
+3.8
diff --git a/python/remotivelabs-broker/.ruff.toml b/python/remotivelabs-broker/.ruff.toml
new file mode 100644
index 0000000..424a177
--- /dev/null
+++ b/python/remotivelabs-broker/.ruff.toml
@@ -0,0 +1,79 @@
+# Exclude a variety of commonly ignored directories.
+exclude = [
+    ".bzr",
+    ".direnv",
+    ".eggs",
+    ".git",
+    ".git-rewrite",
+    ".hg",
+    ".ipynb_checkpoints",
+    ".mypy_cache",
+    ".nox",
+    ".pants.d",
+    ".pyenv",
+    ".pytest_cache",
+    ".pytype",
+    ".ruff_cache",
+    ".svn",
+    ".tox",
+    ".venv",
+    ".vscode",
+    "__pypackages__",
+    "_build",
+    "buck-out",
+    "build",
+    "dist",
+    "node_modules",
+    "site-packages",
+    "venv",
+    'deps',
+    'binaries',
+    '__pycache__'
+]
+
+line-length = 140
+indent-width = 4
+
+# Assume Python 3.8
+target-version = "py38"
+
+[lint]
+# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
+# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
+# McCabe complexity (`C901`) by default.
+select = ["E4", "E7", "E9", "F"]
+ignore = []
+
+# Allow fix for all enabled rules (when `--fix`) is provided.
+fixable = ["ALL"]
+unfixable = []
+
+# Allow unused variables when underscore-prefixed.
+dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
+
+[format]
+# Like Black, use double quotes for strings.
+quote-style = "double"
+
+# Like Black, indent with spaces, rather than tabs.
+indent-style = "space"
+
+# Like Black, respect magic trailing commas.
+skip-magic-trailing-comma = false
+
+# Like Black, automatically detect the appropriate line ending.
+line-ending = "auto"
+
+# Enable auto-formatting of code examples in docstrings. Markdown,
+# reStructuredText code/literal blocks and doctests are all supported.
+#
+# This is currently disabled by default, but it is planned for this
+# to be opt-out in the future.
+docstring-code-format = false
+
+# Set the line length limit used when formatting code snippets in
+# docstrings.
+#
+# This only has an effect when the `docstring-code-format` setting is
+# enabled.
+docstring-code-line-length = "dynamic"
diff --git a/python/remotivelabs-broker/docker-build.sh b/python/remotivelabs-broker/docker-build.sh
index a339889..7d8732a 100755
--- a/python/remotivelabs-broker/docker-build.sh
+++ b/python/remotivelabs-broker/docker-build.sh
@@ -11,4 +11,4 @@ docker run \
   -v $(pwd)/../../:/app \
   -e "protofile=*.proto" \
   -w /app \
-  -it remotivelabs/python-api-build-image
\ No newline at end of file
+  -it remotivelabs/python-api-build-image
diff --git a/python/remotivelabs-broker/docker/Dockerfile b/python/remotivelabs-broker/docker/Dockerfile
index 6a35c6d..6abb261 100644
--- a/python/remotivelabs-broker/docker/Dockerfile
+++ b/python/remotivelabs-broker/docker/Dockerfile
@@ -11,6 +11,7 @@ RUN pip install hatch json-schema-for-humans
 RUN pip3 install grpcio-tools==1.44.0
 RUN pip3 install hatch
 RUN pip3 install pdoc
+RUN pip3 install mypy-protobuf==3.3.0
 COPY docker/download_protoc.sh .
RUN rm /bin/sh && ln -s /bin/bash /bin/sh RUN sh download_protoc.sh @@ -25,4 +26,3 @@ ENV protofile=*.proto VOLUME /ouput CMD ["./python/remotivelabs-broker/docker/build-all-in-docker.sh"] - diff --git a/python/remotivelabs-broker/docker/build-all-in-docker.sh b/python/remotivelabs-broker/docker/build-all-in-docker.sh index 8ac1722..dd48c8e 100755 --- a/python/remotivelabs-broker/docker/build-all-in-docker.sh +++ b/python/remotivelabs-broker/docker/build-all-in-docker.sh @@ -4,11 +4,15 @@ set -e HOME=/app/python/remotivelabs-broker STUBS_OUTPUT=$HOME/remotivelabs/broker/generated/sync +mkdir -p "${STUBS_OUTPUT}" + #1.1 Generate stubs python3 -m grpc_tools.protoc \ -I /app/protos \ --python_out=$STUBS_OUTPUT \ --grpc_python_out=$STUBS_OUTPUT \ + --mypy_out=$STUBS_OUTPUT \ + --mypy_grpc_out=$STUBS_OUTPUT \ /app/protos/*.proto @@ -63,4 +67,4 @@ function generate_json() { generate_python_docs (cd /app/protos && generate_proto_docs) -(cd /app/schemas/ && generate_json) \ No newline at end of file +(cd /app/schemas/ && generate_json) diff --git a/python/remotivelabs-broker/misc/fix_import_statements.py b/python/remotivelabs-broker/misc/fix_import_statements.py index 5ce383e..673f22b 100644 --- a/python/remotivelabs-broker/misc/fix_import_statements.py +++ b/python/remotivelabs-broker/misc/fix_import_statements.py @@ -12,6 +12,7 @@ import glob files = glob.glob("remotivelabs/broker/generated/sync/*.py") +files = files + glob.glob("remotivelabs/broker/generated/sync/*.pyi") regex_string = r"^import \w+_pb2" substitute_string = "from . \\g<0>" diff --git a/python/remotivelabs-broker/pyproject.toml b/python/remotivelabs-broker/pyproject.toml index 498d99c..e61b64d 100644 --- a/python/remotivelabs-broker/pyproject.toml +++ b/python/remotivelabs-broker/pyproject.toml @@ -38,8 +38,12 @@ classifiers = [ dependencies = [ "grpc-interceptor~=0.14", "grpcio~=1.44", + "grpc-stubs~=1.53.0.5", "requests~=2.21", - "protobuf>=3.19.0,<=3.20.1" + "protobuf>=3.19.0,<=3.20.1", + "mypy-protobuf~=3.3.0", + "types-protobuf~=4.24.0.20240106", + "grpc-interceptor" ] dynamic = ["version"] @@ -92,5 +96,4 @@ exclude_lines = [ ] [tool.hatch.build.targets.wheel] -packages = ["remotivelabs"] - +packages = ["remotivelabs", "remotivelabs/broker/py.typed"] diff --git a/python/remotivelabs-broker/remotivelabs/__init__.py b/python/remotivelabs-broker/remotivelabs/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/remotivelabs-broker/remotivelabs/broker/.gitignore b/python/remotivelabs-broker/remotivelabs/broker/.gitignore new file mode 100644 index 0000000..9ab870d --- /dev/null +++ b/python/remotivelabs-broker/remotivelabs/broker/.gitignore @@ -0,0 +1 @@ +generated/ diff --git a/python/remotivelabs-broker/remotivelabs/broker/__init__.py b/python/remotivelabs-broker/remotivelabs/broker/__init__.py index 5cf30ba..6eeaed0 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/__init__.py +++ b/python/remotivelabs-broker/remotivelabs/broker/__init__.py @@ -23,14 +23,3 @@ # SPDX-FileCopyrightText: 2022-present remotiveLabs # # SPDX-License-Identifier: Apache-2.0 - -from .__about__ import __version__ -import logging - -log: logging.Logger = logging.getLogger("com.remotivelabs.broker") -"""Package logging interface""" - -log.addHandler(logging.NullHandler()) - -version: str = __version__ -"""Library version""" diff --git a/python/remotivelabs-broker/remotivelabs/broker/generated/sync/.gitignore b/python/remotivelabs-broker/remotivelabs/broker/generated/sync/.gitignore deleted file mode 100644 index 
f104652..0000000 --- a/python/remotivelabs-broker/remotivelabs/broker/generated/sync/.gitignore +++ /dev/null @@ -1 +0,0 @@ -*.py diff --git a/python/remotivelabs-broker/remotivelabs/broker/py.typed b/python/remotivelabs-broker/remotivelabs/broker/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py b/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py index 19fd0cb..8d8804f 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py @@ -22,18 +22,18 @@ Link: . """ -from ..generated.sync import common_pb2 -from ..generated.sync import common_pb2_grpc -from ..generated.sync import diagnostics_api_pb2 -from ..generated.sync import diagnostics_api_pb2_grpc -from ..generated.sync import functional_api_pb2 -from ..generated.sync import functional_api_pb2_grpc -from ..generated.sync import network_api_pb2 -from ..generated.sync import network_api_pb2_grpc -from ..generated.sync import system_api_pb2 -from ..generated.sync import system_api_pb2_grpc -from ..generated.sync import traffic_api_pb2 -from ..generated.sync import traffic_api_pb2_grpc +from ..generated.sync import common_pb2 # noqa: F401 +from ..generated.sync import common_pb2_grpc # noqa: F401 +from ..generated.sync import diagnostics_api_pb2 # noqa: F401 +from ..generated.sync import diagnostics_api_pb2_grpc # noqa: F401 +from ..generated.sync import functional_api_pb2 # noqa: F401 +from ..generated.sync import functional_api_pb2_grpc # noqa: F401 +from ..generated.sync import network_api_pb2 # noqa: F401 +from ..generated.sync import network_api_pb2_grpc # noqa: F401 +from ..generated.sync import system_api_pb2 # noqa: F401 +from ..generated.sync import system_api_pb2_grpc # noqa: F401 +from ..generated.sync import traffic_api_pb2 # noqa: F401 +from ..generated.sync import traffic_api_pb2_grpc # noqa: F401 from .signalcreator import SignalCreator from .client import Client @@ -41,7 +41,6 @@ from .client import SignalsInFrame from .client import SignalIdentifier from .client import BrokerException -from .client import SignalsInFrame from .helper import create_channel from .helper import publish_signals from .helper import printer diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/client.py b/python/remotivelabs-broker/remotivelabs/broker/sync/client.py index fb6921e..0b62cd3 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/client.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/client.py @@ -3,10 +3,11 @@ import binascii import json import queue +import grpc from threading import Thread -from typing import Union, Callable, List, Iterable +from typing import Union, Callable, List, Iterable, Optional -from . import SignalCreator +from .signalcreator import SignalCreator from . 
import helper as br from ..generated.sync import network_api_pb2 as network_api from ..generated.sync import network_api_pb2_grpc @@ -97,15 +98,14 @@ def bytes_value(self): def as_dict(self): return { - 'timestamp_us': self.timestamp_us(), - 'name': self.name(), - 'namespace': self.namespace(), - 'value': self.value() + "timestamp_us": self.timestamp_us(), + "name": self.name(), + "namespace": self.namespace(), + "value": self.value(), } class SignalsInFrame(Iterable): - def __init__(self, signals: List[SignalValue]): self.signals = signals self.index = 0 @@ -133,23 +133,19 @@ class BrokerException(Exception): class Client: - def __init__(self, client_id: str = "broker_client"): - - self._signal_creator = None - self._traffic_stub = None - self._system_stub = None - self._network_stub = None - self._intercept_channel = None + self._signal_creator: SignalCreator + self._traffic_stub: traffic_api_pb2_grpc.TrafficServiceStub + self._system_stub: system_api_pb2_grpc.SystemServiceStub + self._network_stub: network_api_pb2_grpc.NetworkServiceStub + self._intercept_channel: grpc.Channel self.client_id = client_id - self.url = None - self.api_key = None + self.url: Optional[str] = None + self.api_key: Optional[str] = None self.on_connect: Union[Callable[[Client], None], None] = None self.on_signals: Union[Callable[[SignalsInFrame], None], None] = None - def connect(self, - url: str, - api_key: Union[str, None] = None): + def connect(self, url: str, api_key: Union[str, None] = None): self.url = url self.api_key = api_key if url.startswith("https"): @@ -166,8 +162,9 @@ def connect(self, if self.on_connect is not None: self.on_connect(self) - def _validate_and_get_subscribed_signals(self, subscribed_namespaces: List[str], subscribed_signals: List[str]) \ - -> List[SignalIdentifier]: + def _validate_and_get_subscribed_signals( + self, subscribed_namespaces: List[str], subscribed_signals: List[str] + ) -> List[SignalIdentifier]: # Since we cannot know which List[signals] belongs to which namespace we need to fetch # all signals from the broker and find the proper signal with namespace. 
Finally, we # also filter out namespaces that we do not need since we might have duplicated signal names @@ -175,7 +172,12 @@ def _validate_and_get_subscribed_signals(self, subscribed_namespaces: List[str], # Begin def verify_namespace(available_signal: SignalIdentifier): - return list(filter(lambda namespace: available_signal.namespace == namespace, subscribed_namespaces)) + return list( + filter( + lambda namespace: available_signal.namespace == namespace, + subscribed_namespaces, + ) + ) def find_subscribed_signal(available_signal: SignalIdentifier): return list(filter(lambda s: available_signal.name == s, subscribed_signals)) @@ -184,36 +186,34 @@ def find_subscribed_signal(available_signal: SignalIdentifier): signals_to_subscribe_to: List[SignalIdentifier] = list(filter(find_subscribed_signal, available_signals)) # Check if subscription is done on signal that is not in any of these namespaces - signals_subscribed_to_but_does_not_exist = \ - set(subscribed_signals) - set(map(lambda s: s.name, signals_to_subscribe_to)) + signals_subscribed_to_but_does_not_exist = set(subscribed_signals) - set(map(lambda s: s.name, signals_to_subscribe_to)) if len(signals_subscribed_to_but_does_not_exist) > 0: - raise BrokerException(f"One or more signals you subscribed to does not exist " - f", {signals_subscribed_to_but_does_not_exist}") + raise BrokerException(f"One or more signals you subscribed to does not exist " f", {signals_subscribed_to_but_does_not_exist}") return list(map(lambda s: SignalIdentifier(s.name, s.namespace), signals_to_subscribe_to)) - def subscribe(self, - signal_names: List[str], - namespaces: List[str], - on_signals: Callable[[SignalsInFrame], None] = None, - changed_values_only: bool = True): - + def subscribe( + self, + signal_names: List[str], + namespaces: List[str], + on_signals: Callable[[SignalsInFrame], None], + changed_values_only: bool = True, + ): if on_signals is None and self.on_signals is None: raise BrokerException( "You have not specified global client.on_signals nor client.subscribe(on_signals=callback), " - "or you are invoking subscribe() before client.on_signals which is not allowed") + "or you are invoking subscribe() before client.on_signals which is not allowed" + ) client_id = br.common_pb2.ClientId(id=self.client_id) - signals_to_subscribe_to: List[SignalIdentifier] = self._validate_and_get_subscribed_signals( - namespaces, - signal_names) + signals_to_subscribe_to: List[SignalIdentifier] = self._validate_and_get_subscribed_signals(namespaces, signal_names) def to_protobuf_signal(s: SignalIdentifier): return self._signal_creator.signal(s.name, s.namespace) signals_to_subscribe_on = list(map(to_protobuf_signal, signals_to_subscribe_to)) - wait_for_subscription_queue = queue.Queue() + wait_for_subscription_queue: queue.Queue = queue.Queue() Thread( target=br.act_on_signal, args=( @@ -222,7 +222,7 @@ def to_protobuf_signal(s: SignalIdentifier): signals_to_subscribe_on, changed_values_only, # True: only report when signal changes lambda frame: self._on_signals(frame, on_signals), - lambda sub: (wait_for_subscription_queue.put((self.client_id, sub))) + lambda sub: (wait_for_subscription_queue.put((self.client_id, sub))), ), ).start() client_id, subscription = wait_for_subscription_queue.get() @@ -233,9 +233,9 @@ def _on_signals(self, signals_in_frame: network_api.Signals, callback): Updates "local" callback or global on_signals callback if local callback is None """ if callback is not None: - callback(SignalsInFrame(list(map(lambda s: SignalValue(s), 
signals_in_frame)))) + callback(SignalsInFrame(list(map(lambda s: SignalValue(s), signals_in_frame)))) # type: ignore[call-overload] elif self.on_signals is not None: - self.on_signals(SignalsInFrame(list(map(lambda s: SignalValue(s), signals_in_frame)))) + self.on_signals(SignalsInFrame(list(map(lambda s: SignalValue(s), signals_in_frame)))) # type: ignore[call-overload] def list_signal_names(self) -> List[SignalIdentifier]: # Lists available signals diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py b/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py index 4a87c82..9a5b9e0 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py @@ -1,11 +1,7 @@ -from ..generated.sync import common_pb2 -from ..generated.sync import network_api_pb2 -from ..generated.sync import system_api_pb2 -from .. import log -import remotivelabs.broker.sync as br +from ..generated.sync import common_pb2, network_api_pb2_grpc, network_api_pb2, system_api_pb2, system_api_pb2_grpc +from .log import log import os -import base64 import grpc import hashlib import itertools @@ -40,7 +36,7 @@ def intercept( return method(request_or_iterator, new_details) -def create_channel(url: str, x_api_key: Optional[str] = None) -> grpc.intercept_channel: +def create_channel(url: str, x_api_key: Optional[str] = None, authorization_token: Optional[str] = None) -> grpc.Channel: """ Create communication channels for gRPC calls. @@ -49,44 +45,32 @@ def create_channel(url: str, x_api_key: Optional[str] = None) -> grpc.intercept_ :param authorization_token: Access token replacing api-keys moving forward. :return: gRPC channel """ - return create_channel(url, x_api_key, None) + parsed_url = urlparse(url) + if parsed_url.hostname is None: + raise BaseException() -def create_channel(url: str, x_api_key: Optional[str] = None, - authorization_token: Optional[str] = None) -> grpc.intercept_channel: - """ - Create communication channels for gRPC calls. - - :param url: URL to broker - :param x_api_key: API key used with RemotiveBroker running in cloud (deprecated). - :param authorization_token: Access token replacing api-keys moving forward. 
- :return: gRPC channel - """ - - url = urlparse(url) - - if url.scheme == "https": - creds = grpc.ssl_channel_credentials( - root_certificates=None, private_key=None, certificate_chain=None - ) - channel = grpc.secure_channel( - url.hostname + ":" + str(url.port or "443"), creds - ) + if parsed_url.scheme == "https": + creds = grpc.ssl_channel_credentials(root_certificates=None, private_key=None, certificate_chain=None) + channel = grpc.secure_channel(parsed_url.hostname + ":" + str(parsed_url.port or "443"), creds) else: - addr = url.hostname + ":" + str(url.port or "50051") + addr = parsed_url.hostname + ":" + str(parsed_url.port or "50051") channel = grpc.insecure_channel(addr) if x_api_key is None and authorization_token is None: return channel elif x_api_key is not None: - return grpc.intercept_channel( - channel, HeaderInterceptor({"x-api-key": x_api_key}) - ) + return grpc.intercept_channel(channel, HeaderInterceptor({"x-api-key": x_api_key})) else: # Adding both x-api-key (old) and authorization header for compatibility return grpc.intercept_channel( - channel, HeaderInterceptor({"x-api-key": authorization_token, - "authorization": f"Bearer {authorization_token}"}) + channel, + HeaderInterceptor( + { + "x-api-key": authorization_token, + "authorization": f"Bearer {authorization_token}", + } + ), ) @@ -107,7 +91,7 @@ def publish_signals(client_id, stub, signals_with_payload, frequency: int = 0) - try: stub.PublishSignals(publisher_info) - except grpc._channel._Rendezvous as err: + except grpc._channel._Rendezvous as err: # type:ignore[attr-defined] log.error(err) @@ -119,11 +103,7 @@ def printer(signals: Sequence[common_pb2.SignalId]) -> None: """ for signal in signals: - log.info( - "{} {} {}".format( - signal.id.name, signal.id.namespace.name, get_value(signal) - ) - ) + log.info("{} {}".format(signal, signal.namespace.name)) def get_sha256(path: str) -> str: @@ -141,14 +121,10 @@ def get_sha256(path: str) -> str: return readable_hash -def generate_data( - file, dest_path, chunk_size, sha256 -) -> Generator[system_api_pb2.FileUploadRequest, None, None]: +def generate_data(file, dest_path, chunk_size, sha256) -> Generator[system_api_pb2.FileUploadRequest, None, None]: for x in itertools.count(start=0): if x == 0: - fileDescription = system_api_pb2.FileDescription( - sha256=sha256, path=dest_path - ) + fileDescription = system_api_pb2.FileDescription(sha256=sha256, path=dest_path) yield system_api_pb2.FileUploadRequest(fileDescription=fileDescription) else: buf = file.read(chunk_size) @@ -157,9 +133,7 @@ def generate_data( yield system_api_pb2.FileUploadRequest(chunk=buf) -def upload_file( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str -) -> None: +def upload_file(system_stub: system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str) -> None: """ Upload single file to internal storage on broker. 
@@ -174,18 +148,12 @@ def upload_file( # make sure path is unix style (necessary for windows, and does no harm om # linux) - upload_iterator = generate_data( - file, dest_path.replace(ntpath.sep, posixpath.sep), 1000000, sha256 - ) - response = system_stub.UploadFile( - upload_iterator, compression=grpc.Compression.Gzip - ) + upload_iterator = generate_data(file, dest_path.replace(ntpath.sep, posixpath.sep), 1000000, sha256) + response = system_stub.UploadFile(upload_iterator, compression=grpc.Compression.Gzip) log.debug("Uploaded {} with response {}".format(path, response)) -def download_file( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str -) -> None: +def download_file(system_stub: system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str) -> None: """ Download file from Broker remote storage. @@ -196,21 +164,14 @@ def download_file( file = open(dest_path, "wb") for response in system_stub.BatchDownloadFiles( - system_api_pb2.FileDescriptions( - fileDescriptions=[system_api_pb2.FileDescription(path=path.replace(ntpath.sep, - posixpath.sep))] - ) + system_api_pb2.FileDescriptions(fileDescriptions=[system_api_pb2.FileDescription(path=path.replace(ntpath.sep, posixpath.sep))]) ): - assert not response.HasField("errorMessage"), ( - "Error uploading file, message is: %s" % response.errorMessage - ) + assert not response.HasField("errorMessage"), "Error uploading file, message is: %s" % response.errorMessage file.write(response.chunk) file.close() -def upload_folder( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, folder: str -) -> None: +def upload_folder(system_stub: system_api_pb2_grpc.SystemServiceStub, folder: str) -> None: """ Upload directory and its content to Broker remote storage. @@ -218,19 +179,14 @@ def upload_folder( :param folder: Path to directory in local file storage """ - files = [ - y - for x in os.walk(folder) - for y in glob(os.path.join(x[0], "*")) - if not os.path.isdir(y) - ] + files = [y for x in os.walk(folder) for y in glob(os.path.join(x[0], "*")) if not os.path.isdir(y)] assert len(files) != 0, "Specified upload folder is empty or does not exist" for file in files: upload_file(system_stub, file, file.replace(folder, "")) def reload_configuration( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, + system_stub: system_api_pb2_grpc.SystemServiceStub, ) -> None: """ Trigger reload of configuration on Broker. @@ -244,7 +200,7 @@ def reload_configuration( def check_license( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, + system_stub: system_api_pb2_grpc.SystemServiceStub, ) -> None: """ Check license to Broker. Throws exception if failure. 
@@ -252,17 +208,15 @@ def check_license( :param system_stub: System gRPC channel stub """ status = system_stub.GetLicenseInfo(common_pb2.Empty()).status - assert status == system_api_pb2.LicenseStatus.VALID, ( - "Check your license, status is: %d" % status - ) + assert status == system_api_pb2.LicenseStatus.VALID, "Check your license, status is: %d" % status def act_on_signal( - client_id: br.common_pb2.ClientId, - network_stub: br.network_api_pb2_grpc.NetworkServiceStub, - sub_signals: Sequence[br.common_pb2.SignalId], + client_id: common_pb2.ClientId, + network_stub: network_api_pb2_grpc.NetworkServiceStub, + sub_signals: Sequence[common_pb2.SignalId], on_change: bool, - fun: Callable[[Sequence[br.network_api_pb2.Signal]], None], + fun: Callable[[Sequence[network_api_pb2.Signal]], None], on_subscribed: Optional[Callable[..., None]] = None, ) -> None: """ @@ -298,21 +252,21 @@ def act_on_signal( subscripton.cancel() print("A gRPC error occurred:") print(e) - except grpc.RpcError as e2: + except grpc.RpcError: pass - except grpc._channel._Rendezvous as err: + except grpc._channel._Rendezvous as err: # type:ignore[attr-defined] log.error(err) # reload, alternatively non-existing signal log.debug("Subscription terminated") def act_on_scripted_signal( - client_id: br.common_pb2.ClientId, - network_stub: br.network_api_pb2_grpc.NetworkServiceStub, + client_id: common_pb2.ClientId, + network_stub: network_api_pb2_grpc.NetworkServiceStub, script: bytes, on_change: bool, - fun: Callable[[Sequence[br.network_api_pb2.Signal]], None], + fun: Callable[[Sequence[network_api_pb2.Signal]], None], on_subscribed: Optional[Callable[..., None]] = None, ) -> None: """ @@ -334,9 +288,7 @@ def act_on_scripted_signal( onChange=on_change, ) try: - subscription = network_stub.SubscribeToSignalWithScript( - sub_info, timeout=None - ) + subscription = network_stub.SubscribeToSignalWithScript(sub_info, timeout=None) if on_subscribed: on_subscribed(subscription) log.debug("Waiting for signal...") @@ -348,10 +300,10 @@ def act_on_scripted_signal( subscription.cancel() print("A gRPC error occurred:") print(e) - except grpc.RpcError as e2: + except grpc.RpcError: pass - except grpc._channel._Rendezvous as err: + except grpc._channel._Rendezvous as err: # type:ignore[attr-defined] log.error(err) # reload, alternatively non-existing signal log.debug("Subscription terminated") diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/log.py b/python/remotivelabs-broker/remotivelabs/broker/sync/log.py new file mode 100644 index 0000000..994583f --- /dev/null +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/log.py @@ -0,0 +1,6 @@ +import logging + +log: logging.Logger = logging.getLogger("com.remotivelabs.broker") +"""Package logging interface""" + +log.addHandler(logging.NullHandler()) diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py b/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py index 30084a1..228889e 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py @@ -1,5 +1,3 @@ -import sys - import logging from ..generated.sync import common_pb2 @@ -10,10 +8,7 @@ T = TypeVar("T") _logger = logging.getLogger("remotivelabs.SignalCreator") -_MSG_DUPLICATE = ( - "Warning duplicated (namespace.signal): {}, to avoid" - + 'ambiguity set "short_names": false in your interfaces.json on {}' -) +_MSG_DUPLICATE = "Warning duplicated (namespace.signal): {}, to avoid" + 
'ambiguity set "short_names": false in your interfaces.json on {}' class MetaGetter: @@ -132,9 +127,7 @@ def signal(self, name: str, namespace_name: str) -> common_pb2.SignalId: """ self.get_meta(name, namespace_name) # Checks if the signal is present - return common_pb2.SignalId( - name=name, namespace=common_pb2.NameSpace(name=namespace_name) - ) + return common_pb2.SignalId(name=name, namespace=common_pb2.NameSpace(name=namespace_name)) def frames(self, namespace_name: str) -> Sequence[common_pb2.SignalId]: """ @@ -146,9 +139,7 @@ def frames(self, namespace_name: str) -> Sequence[common_pb2.SignalId]: all_frames.append(self.signal(finfo.signalInfo.id.name, namespace_name)) return all_frames - def frame_by_signal( - self, name: str, namespace_name: str - ) -> Sequence[common_pb2.SignalId]: + def frame_by_signal(self, name: str, namespace_name: str) -> common_pb2.SignalId: """ Get frame for the given signal. @@ -161,13 +152,9 @@ def frame_by_signal( for sinfo in finfo.childInfo: if sinfo.id.name == name: return self.signal(finfo.signalInfo.id.name, namespace_name) - raise Exception( - f"signal not declared (namespace, signal): {namespace_name} {name}" - ) + raise Exception(f"signal not declared (namespace, signal): {namespace_name} {name}") - def signals_in_frame( - self, name: str, namespace_name: str - ) -> Sequence[common_pb2.SignalId]: + def signals_in_frame(self, name: str, namespace_name: str) -> Sequence[common_pb2.SignalId]: """ Get all signals residing in the frame. @@ -183,14 +170,10 @@ def signals_in_frame( frame = finfo for sinfo in finfo.childInfo: all_signals.append(self.signal(sinfo.id.name, namespace_name)) - assert ( - frame is not None - ), f"frame {name} does not exist in namespace {namespace_name}" + assert frame is not None, f"frame {name} does not exist in namespace {namespace_name}" return all_signals - def signal_with_payload( - self, name: str, namespace_name: str, value_pair, allow_malformed: bool = False - ) -> network_api_pb2.Signal: + def signal_with_payload(self, name: str, namespace_name: str, value_pair, allow_malformed: bool = False) -> network_api_pb2.Signal: """ Create value with signal for writing. 
@@ -209,25 +192,15 @@ def signal_with_payload( raise Exception(f"type must be one of: {types}") if key == "raw" and allow_malformed is False: expected = meta.getSize() - assert ( - len(value) * 8 == expected - ), f"payload size missmatch, expected {expected/8} bytes" + assert len(value) * 8 == expected, f"payload size missmatch, expected {expected/8} bytes" elif key != "raw": # Check bounds if any checkMin = meta.getMin() if (checkMin is not None) and (value < checkMin): - _logger.warning( - 'Value below minimum value of {} for signal "{}"'.format( - checkMin, name - ) - ) + _logger.warning('Value below minimum value of {} for signal "{}"'.format(checkMin, name)) checkMax = meta.getMax() if (checkMax is not None) and (value > checkMax): - _logger.warning( - 'Value above maximum value of {} for signal "{}"'.format( - checkMax, name - ) - ) + _logger.warning('Value above maximum value of {} for signal "{}"'.format(checkMax, name)) params = {"id": signal, key: value} return network_api_pb2.Signal(**params) diff --git a/python/remotivelabs-broker/tests/__init__.py b/python/remotivelabs-broker/tests/__init__.py deleted file mode 100644 index a48296a..0000000 --- a/python/remotivelabs-broker/tests/__init__.py +++ /dev/null @@ -1,3 +0,0 @@ -# SPDX-FileCopyrightText: 2022-present remotiveLabs -# -# SPDX-License-Identifier: Apache-2.0 diff --git a/python/remotivelabs-broker/tests/test_live.py b/python/remotivelabs-broker/tests/test_live.py index 1aa1a51..caee184 100644 --- a/python/remotivelabs-broker/tests/test_live.py +++ b/python/remotivelabs-broker/tests/test_live.py @@ -1,4 +1,3 @@ -import google.protobuf import logging import pytest import remotivelabs.broker.sync as br @@ -75,17 +74,11 @@ def test_min_max(broker_configured, caplog): # Publing a value below mininum sc.signal_with_payload("Speed", "ecu_A", ("double", -1.0)) - assert ( - caplog.records[0].message - == 'Value below minimum value of 0.0 for signal "Speed"' - ) + assert caplog.records[0].message == 'Value below minimum value of 0.0 for signal "Speed"' # Publing a value above maximum sc.signal_with_payload("Speed", "ecu_A", ("double", 91.0)) - assert ( - caplog.records[1].message - == 'Value above maximum value of 90.0 for signal "Speed"' - ) + assert caplog.records[1].message == 'Value above maximum value of 90.0 for signal "Speed"' assert len(caplog.records) == 2 diff --git a/python/remotivelabs-broker/tests/test_proto_types.py b/python/remotivelabs-broker/tests/test_proto_types.py index 96f1e6e..f02e5aa 100644 --- a/python/remotivelabs-broker/tests/test_proto_types.py +++ b/python/remotivelabs-broker/tests/test_proto_types.py @@ -1,4 +1,3 @@ -import google.protobuf as pb import remotivelabs.broker.sync as br # Tests
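
Usage sketches for the APIs touched by this patch follow; they are illustrative only and not part of the diff. First, the now fully typed sync Client from client.py, assuming a RemotiveBroker reachable on the default local port; "Speed" and "ecu_A" are the example signal and namespace names used in test_live.py:

import time

from remotivelabs.broker.sync import Client, SignalsInFrame


def on_signals(frame: SignalsInFrame) -> None:
    # Each entry is a SignalValue exposing name(), namespace(), value(), timestamp_us() and as_dict().
    for signal in frame:
        print(signal.as_dict())


client = Client(client_id="example_client")
client.connect(url="http://127.0.0.1:50051", api_key=None)
client.subscribe(
    signal_names=["Speed"],
    namespaces=["ecu_A"],
    on_signals=on_signals,  # required here unless client.on_signals was set before subscribing
    changed_values_only=True,
)
time.sleep(10)  # the subscription is served from a background thread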
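
create_channel now has a single signature that accepts both x_api_key and authorization_token and returns a plain grpc.Channel. A sketch of driving the system service through the helpers; the broker address and the folder name are placeholders:

from remotivelabs.broker.sync import create_channel, system_api_pb2_grpc
from remotivelabs.broker.sync.helper import check_license, reload_configuration, upload_folder

channel = create_channel("http://127.0.0.1:50051", x_api_key=None, authorization_token=None)
system_stub = system_api_pb2_grpc.SystemServiceStub(channel)

check_license(system_stub)                   # asserts that the broker license status is VALID
upload_folder(system_stub, "configuration")  # upload a local directory to the broker's storage
reload_configuration(system_stub)            # ask the broker to reload its configuration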
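
Publishing goes through SignalCreator.signal_with_payload and the publish_signals helper; values outside a signal's min/max bounds are still published but logged as warnings, as exercised in test_live.py. A sketch, where the SignalCreator constructor argument is assumed from existing usage of the library and is not visible in this diff:

from remotivelabs.broker.sync import SignalCreator, common_pb2, create_channel
from remotivelabs.broker.sync import network_api_pb2_grpc, publish_signals, system_api_pb2_grpc

channel = create_channel("http://127.0.0.1:50051")
system_stub = system_api_pb2_grpc.SystemServiceStub(channel)
network_stub = network_api_pb2_grpc.NetworkServiceStub(channel)

sc = SignalCreator(system_stub)  # assumed constructor; not shown in this patch
client_id = common_pb2.ClientId(id="example_publisher")

# ("double", 42.0) selects the double payload field for the example signal Speed in namespace ecu_A.
speed = sc.signal_with_payload("Speed", "ecu_A", ("double", 42.0))
publish_signals(client_id, network_stub, [speed])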
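
Finally, the package logger moves from remotivelabs/broker/__init__.py to remotivelabs/broker/sync/log.py but keeps its name and the NullHandler, so the library stays silent unless the application opts in; SignalCreator reports its min/max warnings on a separate logger named remotivelabs.SignalCreator:

import logging

logging.basicConfig(level=logging.INFO)
logging.getLogger("com.remotivelabs.broker").setLevel(logging.DEBUG)       # helper and subscription debug output
logging.getLogger("remotivelabs.SignalCreator").setLevel(logging.WARNING)  # out-of-range value warnings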