From 0ba2931d5e869785bd7234b750dced08192146a8 Mon Sep 17 00:00:00 2001
From: Johan Karlberg
Date: Tue, 23 Jan 2024 10:33:49 +0100
Subject: [PATCH] Introduced workflows, mypy, ruff and pylint

- Workflow that validates that the python apis can be generated
  * mypy runs after this because it is dependent on generated code
- Added ruff configuration
  * workflow validate formatting and code style
- Added missing __init__.py files that are needed by tools in order to derive fqn
- Added py.typed
  * Package maintainers who wish to support type checking of their code MUST add a marker file named py.typed to their package supporting typing
- .pylintrc which ignores some more warnings compared to the default settings
- Formatted code according to new rules, ignored some warnings
---
 .github/workflows/build-docs-mypy.yaml        |  29 +
 .github/workflows/code-qa.yaml                |  39 ++
 python/remotivelabs-broker/.pylintrc          | 640 ++++++++++++++++++
 python/remotivelabs-broker/.ruff.toml         |  76 +++
 python/remotivelabs-broker/misc/__init__.py   |   0
 .../misc/fix_import_statements.py             |  19 +-
 .../remotivelabs/__init__.py                  |   3 +
 .../remotivelabs/broker/__init__.py           |   3 +-
 .../remotivelabs/broker/generated/__init__.py |   0
 .../remotivelabs/broker/py.typed              |   0
 .../remotivelabs/broker/sync/__init__.py      |  64 +-
 .../remotivelabs/broker/sync/client.py        | 114 ++--
 .../remotivelabs/broker/sync/helper.py        | 197 +++---
 .../remotivelabs/broker/sync/signalcreator.py |  95 +--
 python/remotivelabs-broker/tests/test_live.py |  23 +-
 .../tests/test_proto_types.py                 |   3 +-
 16 files changed, 1007 insertions(+), 298 deletions(-)
 create mode 100644 .github/workflows/build-docs-mypy.yaml
 create mode 100644 .github/workflows/code-qa.yaml
 create mode 100644 python/remotivelabs-broker/.pylintrc
 create mode 100644 python/remotivelabs-broker/.ruff.toml
 create mode 100644 python/remotivelabs-broker/misc/__init__.py
 create mode 100644 python/remotivelabs-broker/remotivelabs/__init__.py
 create mode 100644 python/remotivelabs-broker/remotivelabs/broker/generated/__init__.py
 create mode 100644 python/remotivelabs-broker/remotivelabs/broker/py.typed

diff --git a/.github/workflows/build-docs-mypy.yaml b/.github/workflows/build-docs-mypy.yaml
new file mode 100644
index 0000000..3968ff9
--- /dev/null
+++ b/.github/workflows/build-docs-mypy.yaml
@@ -0,0 +1,29 @@
+name: Doc builder and mypy
+
+on: push
+
+jobs:
+  build-docs:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Build docs
+        env:
+          NO_TTY: true
+        run: |
+          cd python/remotivelabs-broker
+          ./docker-build.sh
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.8
+          architecture: x64
+      - name: Install mypy
+        run: |
+          pip install mypy
+          pip install python/remotivelabs-broker
+          pip install python/remotivelabs-broker[default]
+      - name: Run mypy
+        run: |
+          mypy .
diff --git a/.github/workflows/code-qa.yaml b/.github/workflows/code-qa.yaml
new file mode 100644
index 0000000..4f7fb27
--- /dev/null
+++ b/.github/workflows/code-qa.yaml
@@ -0,0 +1,39 @@
+name: Code-QA
+
+on: push
+
+jobs:
+  ruff-lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+
+  ruff-format:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v4
+      - uses: chartboost/ruff-action@v1
+        with:
+          src: "."
+          args: format --check --diff
+
+  pylint:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Checkout
+        uses: actions/checkout@v4
+      - name: Setup Python
+        uses: actions/setup-python@v5
+        with:
+          python-version: 3.8
+          architecture: x64
+      - name: Install pylint
+        run: |
+          pip install pylint
+          pip install python/remotivelabs-broker
+          pip install python/remotivelabs-broker[default]
+      - name: Run pylint
+        run: |
+          cd python/remotivelabs-broker
+          pylint **/*.py
diff --git a/python/remotivelabs-broker/.pylintrc b/python/remotivelabs-broker/.pylintrc
new file mode 100644
index 0000000..1d2ae19
--- /dev/null
+++ b/python/remotivelabs-broker/.pylintrc
@@ -0,0 +1,640 @@
+[MAIN]
+
+# Analyse import fallback blocks. This can be used to support both Python 2 and
+# 3 compatible code, which means that the block might have code that exists
+# only in one or another interpreter, leading to false positives when analysed.
+analyse-fallback-blocks=no
+
+# Clear in-memory caches upon conclusion of linting. Useful if running pylint
+# in a server-like mode.
+clear-cache-post-run=no
+
+# Load and enable all available extensions. Use --list-extensions to see a list
+# all available extensions.
+#enable-all-extensions=
+
+# In error mode, messages with a category besides ERROR or FATAL are
+# suppressed, and no reports are done by default. Error mode is compatible with
+# disabling specific errors.
+#errors-only=
+
+# Always return a 0 (non-error) status code, even if lint errors are found.
+# This is primarily useful in continuous integration scripts.
+#exit-zero=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code.
+extension-pkg-allow-list=
+
+# A comma-separated list of package or module names from where C extensions may
+# be loaded. Extensions are loading into the active Python interpreter and may
+# run arbitrary code. (This is an alternative name to extension-pkg-allow-list
+# for backward compatibility.)
+extension-pkg-whitelist=
+
+# Return non-zero exit code if any of these messages/categories are detected,
+# even if score is above --fail-under value. Syntax same as enable. Messages
+# specified are enabled, while categories only check already-enabled messages.
+fail-on=
+
+# Specify a score threshold under which the program will exit with error.
+fail-under=10
+
+# Interpret the stdin as a python script, whose filename needs to be passed as
+# the module_or_package argument.
+#from-stdin=
+
+# Files or directories to be skipped. They should be base names, not paths.
+ignore=CVS
+
+# Add files or directories matching the regular expressions patterns to the
+# ignore-list. The regex matches against paths and can be in Posix or Windows
+# format. Because '\\' represents the directory delimiter on Windows systems,
+# it can't be used as an escape character.
+ignore-paths=
+
+# Files or directories matching the regular expression patterns are skipped.
+# The regex matches against base names, not paths. The default value ignores
+# Emacs file locks
+ignore-patterns=\..*
+
+# List of module names for which member attributes should not be checked
+# (useful for modules/projects where namespaces are manipulated during runtime
+# and thus existing member attributes cannot be deduced by static analysis). It
+# supports qualified module names, as well as Unix pattern matching.
+ignored-modules=
+
+# Python code to execute, usually for sys.path manipulation such as
+# pygtk.require().
+#init-hook= + +# Use multiple processes to speed up Pylint. Specifying 0 will auto-detect the +# number of processors available to use, and will cap the count on Windows to +# avoid hangs. +jobs=1 + +# Control the amount of potential inferred values when inferring a single +# object. This can help the performance when dealing with large functions or +# complex, nested conditions. +limit-inference-results=100 + +# List of plugins (as comma separated values of python module names) to load, +# usually to register additional checkers. +load-plugins= + +# Pickle collected data for later comparisons. +persistent=yes + +# Minimum Python version to use for version dependent checks. Will default to +# the version used to run pylint. +py-version=3.8 + +# Discover python modules and packages in the file system subtree. +recursive=no + +# Add paths to the list of the source roots. Supports globbing patterns. The +# source root is an absolute path or a path relative to the current working +# directory used to determine a package namespace for modules located under the +# source root. +source-roots= + +# When enabled, pylint would attempt to guess common misconfiguration and emit +# user-friendly hints instead of false-positive error messages. +suggestion-mode=yes + +# Allow loading of arbitrary C extensions. Extensions are imported into the +# active Python interpreter and may run arbitrary code. +unsafe-load-any-extension=no + +# In verbose mode, extra non-checker-related info will be displayed. +#verbose= + + +[BASIC] + +# Naming style matching correct argument names. +argument-naming-style=snake_case + +# Regular expression matching correct argument names. Overrides argument- +# naming-style. If left empty, argument names will be checked with the set +# naming style. +#argument-rgx= + +# Naming style matching correct attribute names. +attr-naming-style=snake_case + +# Regular expression matching correct attribute names. Overrides attr-naming- +# style. If left empty, attribute names will be checked with the set naming +# style. +#attr-rgx= + +# Bad variable names which should always be refused, separated by a comma. +bad-names=foo, + bar, + baz, + toto, + tutu, + tata + +# Bad variable names regexes, separated by a comma. If names match any regex, +# they will always be refused +bad-names-rgxs= + +# Naming style matching correct class attribute names. +class-attribute-naming-style=any + +# Regular expression matching correct class attribute names. Overrides class- +# attribute-naming-style. If left empty, class attribute names will be checked +# with the set naming style. +#class-attribute-rgx= + +# Naming style matching correct class constant names. +class-const-naming-style=UPPER_CASE + +# Regular expression matching correct class constant names. Overrides class- +# const-naming-style. If left empty, class constant names will be checked with +# the set naming style. +#class-const-rgx= + +# Naming style matching correct class names. +class-naming-style=PascalCase + +# Regular expression matching correct class names. Overrides class-naming- +# style. If left empty, class names will be checked with the set naming style. +#class-rgx= + +# Naming style matching correct constant names. +const-naming-style=UPPER_CASE + +# Regular expression matching correct constant names. Overrides const-naming- +# style. If left empty, constant names will be checked with the set naming +# style. +#const-rgx= + +# Minimum line length for functions/classes that require docstrings, shorter +# ones are exempt. 
+docstring-min-length=-1 + +# Naming style matching correct function names. +function-naming-style=snake_case + +# Regular expression matching correct function names. Overrides function- +# naming-style. If left empty, function names will be checked with the set +# naming style. +#function-rgx= + +# Good variable names which should always be accepted, separated by a comma. +good-names=i, + j, + k, + ex, + Run, + _ + +# Good variable names regexes, separated by a comma. If names match any regex, +# they will always be accepted +good-names-rgxs= + +# Include a hint for the correct naming format with invalid-name. +include-naming-hint=no + +# Naming style matching correct inline iteration names. +inlinevar-naming-style=any + +# Regular expression matching correct inline iteration names. Overrides +# inlinevar-naming-style. If left empty, inline iteration names will be checked +# with the set naming style. +#inlinevar-rgx= + +# Naming style matching correct method names. +method-naming-style=snake_case + +# Regular expression matching correct method names. Overrides method-naming- +# style. If left empty, method names will be checked with the set naming style. +#method-rgx= + +# Naming style matching correct module names. +module-naming-style=snake_case + +# Regular expression matching correct module names. Overrides module-naming- +# style. If left empty, module names will be checked with the set naming style. +#module-rgx= + +# Colon-delimited sets of names that determine each other's naming style when +# the name regexes allow several styles. +name-group= + +# Regular expression which should only match function or class names that do +# not require a docstring. +no-docstring-rgx=^_ + +# List of decorators that produce properties, such as abc.abstractproperty. Add +# to this list to register other decorators that produce valid properties. +# These decorators are taken in consideration only for invalid-name. +property-classes=abc.abstractproperty + +# Regular expression matching correct type alias names. If left empty, type +# alias names will be checked with the set naming style. +#typealias-rgx= + +# Regular expression matching correct type variable names. If left empty, type +# variable names will be checked with the set naming style. +#typevar-rgx= + +# Naming style matching correct variable names. +variable-naming-style=snake_case + +# Regular expression matching correct variable names. Overrides variable- +# naming-style. If left empty, variable names will be checked with the set +# naming style. +#variable-rgx= + + +[CLASSES] + +# Warn about protected attribute access inside special methods +check-protected-access-in-special-methods=no + +# List of method names used to declare (i.e. assign) instance attributes. +defining-attr-methods=__init__, + __new__, + setUp, + asyncSetUp, + __post_init__ + +# List of member names, which should be excluded from the protected access +# warning. +exclude-protected=_asdict,_fields,_replace,_source,_make,os._exit + +# List of valid names for the first argument in a class method. +valid-classmethod-first-arg=cls + +# List of valid names for the first argument in a metaclass class method. +valid-metaclass-classmethod-first-arg=mcs + + +[DESIGN] + +# List of regular expressions of class ancestor names to ignore when counting +# public methods (see R0903) +exclude-too-few-public-methods= + +# List of qualified class names to ignore when counting class parents (see +# R0901) +ignored-parents= + +# Maximum number of arguments for function / method. 
+max-args=7 + +# Maximum number of attributes for a class (see R0902). +max-attributes=10 + +# Maximum number of boolean expressions in an if statement (see R0916). +max-bool-expr=5 + +# Maximum number of branch for function / method body. +max-branches=12 + +# Maximum number of locals for function / method body. +max-locals=15 + +# Maximum number of parents for a class (see R0901). +max-parents=7 + +# Maximum number of public methods for a class (see R0904). +max-public-methods=20 + +# Maximum number of return / yield for function / method body. +max-returns=6 + +# Maximum number of statements in function / method body. +max-statements=50 + +# Minimum number of public methods for a class (see R0903). +min-public-methods=2 + + +[EXCEPTIONS] + +# Exceptions that will emit a warning when caught. +overgeneral-exceptions=builtins.BaseException,builtins.Exception + + +[FORMAT] + +# Expected format of line ending, e.g. empty (any line ending), LF or CRLF. +expected-line-ending-format= + +# Regexp for a line that is allowed to be longer than the limit. +ignore-long-lines=^\s*(# )??$ + +# Number of spaces of indent required inside a hanging or continued line. +indent-after-paren=4 + +# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1 +# tab). +indent-string=' ' + +# Maximum number of characters on a single line. +max-line-length=140 + +# Maximum number of lines in a module. +max-module-lines=1000 + +# Allow the body of a class to be on the same line as the declaration if body +# contains single statement. +single-line-class-stmt=no + +# Allow the body of an if to be on the same line as the test if there is no +# else. +single-line-if-stmt=no + + +[IMPORTS] + +# List of modules that can be imported at any level, not just the top level +# one. +allow-any-import-level= + +# Allow explicit reexports by alias from a package __init__. +allow-reexport-from-package=no + +# Allow wildcard imports from modules that define __all__. +allow-wildcard-with-all=no + +# Deprecated modules which should not be used, separated by a comma. +deprecated-modules= + +# Output a graph (.gv or any supported image format) of external dependencies +# to the given file (report RP0402 must not be disabled). +ext-import-graph= + +# Output a graph (.gv or any supported image format) of all (i.e. internal and +# external) dependencies to the given file (report RP0402 must not be +# disabled). +import-graph= + +# Output a graph (.gv or any supported image format) of internal dependencies +# to the given file (report RP0402 must not be disabled). +int-import-graph= + +# Force import order to recognize a module as part of the standard +# compatibility libraries. +known-standard-library= + +# Force import order to recognize a module as part of a third party library. +known-third-party=enchant + +# Couples of modules and preferred modules, separated by a comma. +preferred-modules= + + +[LOGGING] + +# The type of string formatting that logging methods do. `old` means using % +# formatting, `new` is for `{}` formatting. +logging-format-style=old + +# Logging modules to check that the string format arguments are in logging +# function parameter format. +logging-modules=logging + + +[MESSAGES CONTROL] + +# Only show warnings with the listed confidence levels. Leave empty to show +# all. Valid levels: HIGH, CONTROL_FLOW, INFERENCE, INFERENCE_FAILURE, +# UNDEFINED. 
+confidence=HIGH, + CONTROL_FLOW, + INFERENCE, + INFERENCE_FAILURE, + UNDEFINED + +# Disable the message, report, category or checker with the given id(s). You +# can either give multiple identifiers separated by comma (,) or put this +# option multiple times (only on the command line, not in the configuration +# file where it should appear only once). You can also use "--disable=all" to +# disable everything first and then re-enable specific checks. For example, if +# you want to run only the similarities checker, you can use "--disable=all +# --enable=similarities". If you want to run only the classes checker, but have +# no Warning level messages displayed, use "--disable=all --enable=classes +# --disable=W". +disable=raw-checker-failed, + bad-inline-option, + locally-disabled, + file-ignored, + suppressed-message, + useless-suppression, + deprecated-pragma, + use-symbolic-message-instead, + use-implicit-booleaness-not-comparison-to-string, + use-implicit-booleaness-not-comparison-to-zero, + missing-function-docstring, + missing-class-docstring, + missing-module-docstring, + logging-fstring-interpolation, + broad-exception-raised, + too-few-public-methods + +# Enable the message, report, category or checker with the given id(s). You can +# either give multiple identifier separated by comma (,) or put this option +# multiple time (only on the command line, not in the configuration file where +# it should appear only once). See also the "--disable" option for examples. +enable= + + +[METHOD_ARGS] + +# List of qualified names (i.e., library.method) which require a timeout +# parameter e.g. 'requests.api.get,requests.api.post' +timeout-methods=requests.api.delete,requests.api.get,requests.api.head,requests.api.options,requests.api.patch,requests.api.post,requests.api.put,requests.api.request + + +[MISCELLANEOUS] + +# List of note tags to take in consideration, separated by a comma. +notes=FIXME, + XXX, + TODO + +# Regular expression of note tags to take in consideration. +notes-rgx= + + +[REFACTORING] + +# Maximum number of nested blocks for function / method body +max-nested-blocks=5 + +# Complete name of functions that never returns. When checking for +# inconsistent-return-statements if a never returning function is called then +# it will be considered as an explicit return statement and no message will be +# printed. +never-returning-functions=sys.exit,argparse.parse_error + + +[REPORTS] + +# Python expression which should return a score less than or equal to 10. You +# have access to the variables 'fatal', 'error', 'warning', 'refactor', +# 'convention', and 'info' which contain the number of messages in each +# category, as well as 'statement' which is the total number of statements +# analyzed. This score is used by the global evaluation report (RP0004). +evaluation=max(0, 0 if fatal else 10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)) + +# Template used to display messages. This is a python new-style format string +# used to format the message information. See doc for all details. +msg-template= + +# Set the output format. Available formats are: text, parseable, colorized, +# json2 (improved json format), json (old json format) and msvs (visual +# studio). You can also give a reporter class, e.g. +# mypackage.mymodule.MyReporterClass. +#output-format= + +# Tells whether to display a full report or only the messages. +reports=no + +# Activate the evaluation score. 
+score=yes + + +[SIMILARITIES] + +# Comments are removed from the similarity computation +ignore-comments=yes + +# Docstrings are removed from the similarity computation +ignore-docstrings=yes + +# Imports are removed from the similarity computation +ignore-imports=yes + +# Signatures are removed from the similarity computation +ignore-signatures=yes + +# Minimum lines number of a similarity. +min-similarity-lines=4 + + +[SPELLING] + +# Limits count of emitted suggestions for spelling mistakes. +max-spelling-suggestions=4 + +# Spelling dictionary name. No available dictionaries : You need to install +# both the python package and the system dependency for enchant to work. +spelling-dict= + +# List of comma separated words that should be considered directives if they +# appear at the beginning of a comment and should not be checked. +spelling-ignore-comment-directives=fmt: on,fmt: off,noqa:,noqa,nosec,isort:skip,mypy: + +# List of comma separated words that should not be checked. +spelling-ignore-words= + +# A path to a file that contains the private dictionary; one word per line. +spelling-private-dict-file= + +# Tells whether to store unknown words to the private dictionary (see the +# --spelling-private-dict-file option) instead of raising a message. +spelling-store-unknown-words=no + + +[STRING] + +# This flag controls whether inconsistent-quotes generates a warning when the +# character used as a quote delimiter is used inconsistently within a module. +check-quote-consistency=no + +# This flag controls whether the implicit-str-concat should generate a warning +# on implicit string concatenation in sequences defined over several lines. +check-str-concat-over-line-jumps=no + + +[TYPECHECK] + +# List of decorators that produce context managers, such as +# contextlib.contextmanager. Add to this list to register other decorators that +# produce valid context managers. +contextmanager-decorators=contextlib.contextmanager + +# List of members which are set dynamically and missed by pylint inference +# system, and so shouldn't trigger E1101 when accessed. Python regular +# expressions are accepted. +generated-members= + +# Tells whether to warn about missing members when the owner of the attribute +# is inferred to be None. +ignore-none=yes + +# This flag controls whether pylint should warn about no-member and similar +# checks whenever an opaque object is returned when inferring. The inference +# can return multiple potential results while evaluating a Python object, but +# some branches might not be evaluated, which results in partial inference. In +# that case, it might be useful to still emit no-member and other checks for +# the rest of the inferred objects. +ignore-on-opaque-inference=yes + +# List of symbolic message names to ignore for Mixin members. +ignored-checks-for-mixins=no-member, + not-async-context-manager, + not-context-manager, + attribute-defined-outside-init + +# List of class names for which member attributes should not be checked (useful +# for classes with dynamically set attributes). This supports the use of +# qualified names. +ignored-classes=optparse.Values,thread._local,_thread._local,argparse.Namespace + +# Show a hint with possible names when a member name was not found. The aspect +# of finding the hint is based on edit distance. +missing-member-hint=yes + +# The minimum edit distance a name should have in order to be considered a +# similar match for a missing member name. 
+missing-member-hint-distance=1 + +# The total number of similar names that should be taken in consideration when +# showing a hint for a missing member. +missing-member-max-choices=1 + +# Regex pattern to define which classes are considered mixins. +mixin-class-rgx=.*[Mm]ixin + +# List of decorators that change the signature of a decorated function. +signature-mutators= + + +[VARIABLES] + +# List of additional names supposed to be defined in builtins. Remember that +# you should avoid defining new builtins when possible. +additional-builtins= + +# Tells whether unused global variables should be treated as a violation. +allow-global-unused-variables=yes + +# List of names allowed to shadow builtins +allowed-redefined-builtins= + +# List of strings which can identify a callback function by name. A callback +# name must start or end with one of those strings. +callbacks=cb_, + _cb + +# A regular expression matching the name of dummy variables (i.e. expected to +# not be used). +dummy-variables-rgx=_+$|(_[a-zA-Z0-9_]*[a-zA-Z0-9]+?$)|dummy|^ignored_|^unused_ + +# Argument names that match this expression will be ignored. +ignored-argument-names=_.*|^ignored_|^unused_ + +# Tells whether we should check for unused import in __init__ files. +init-import=no + +# List of qualified module names which can have objects that can redefine +# builtins. +redefining-builtins-modules=six.moves,past.builtins,future.builtins,builtins,io diff --git a/python/remotivelabs-broker/.ruff.toml b/python/remotivelabs-broker/.ruff.toml new file mode 100644 index 0000000..64b40bb --- /dev/null +++ b/python/remotivelabs-broker/.ruff.toml @@ -0,0 +1,76 @@ +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", + 'deps', + 'binaries', + '__pycache__' +] + +line-length = 140 +indent-width = 4 + +# Assume Python 3.8 +target-version = "py38" + +[lint] +select = ["C901", "E", "W", "F", "RET505", "I001", "B034", "EXE001", "N806", "UP032"] +ignore = [] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. +line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = false + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. 
+docstring-code-line-length = "dynamic" diff --git a/python/remotivelabs-broker/misc/__init__.py b/python/remotivelabs-broker/misc/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/remotivelabs-broker/misc/fix_import_statements.py b/python/remotivelabs-broker/misc/fix_import_statements.py index 673f22b..dd55b7a 100644 --- a/python/remotivelabs-broker/misc/fix_import_statements.py +++ b/python/remotivelabs-broker/misc/fix_import_statements.py @@ -8,21 +8,20 @@ # This script goes through all the python files in the folder and does the replacement based on the regex pattern # `regex_string` defined below. -import re import glob +import re files = glob.glob("remotivelabs/broker/generated/sync/*.py") files = files + glob.glob("remotivelabs/broker/generated/sync/*.pyi") -regex_string = r"^import \w+_pb2" -substitute_string = "from . \\g<0>" +REGEX_STRING = r"^import \w+_pb2" +SUBSTITUTE_STRING = "from . \\g<0>" # You can manually specify the number of replacements by changing the 4th argument for file in files: - stream = open(file, "rt") - contents = stream.read() - result = re.sub(regex_string, substitute_string, contents, 0, re.MULTILINE) - stream.close() - stream = open(file, "wt") - stream.write(result) - stream.close() + with open(file, encoding="utf-8") as stream: + contents = stream.read() + result = re.sub(REGEX_STRING, SUBSTITUTE_STRING, contents, count=0, flags=re.MULTILINE) + + with open(file, encoding="utf-8", mode="wt") as stream: + stream.write(result) diff --git a/python/remotivelabs-broker/remotivelabs/__init__.py b/python/remotivelabs-broker/remotivelabs/__init__.py new file mode 100644 index 0000000..b36383a --- /dev/null +++ b/python/remotivelabs-broker/remotivelabs/__init__.py @@ -0,0 +1,3 @@ +from pkgutil import extend_path + +__path__ = extend_path(__path__, __name__) diff --git a/python/remotivelabs-broker/remotivelabs/broker/__init__.py b/python/remotivelabs-broker/remotivelabs/broker/__init__.py index 5cf30ba..984e275 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/__init__.py +++ b/python/remotivelabs-broker/remotivelabs/broker/__init__.py @@ -24,9 +24,10 @@ # # SPDX-License-Identifier: Apache-2.0 -from .__about__ import __version__ import logging +from .__about__ import __version__ + log: logging.Logger = logging.getLogger("com.remotivelabs.broker") """Package logging interface""" diff --git a/python/remotivelabs-broker/remotivelabs/broker/generated/__init__.py b/python/remotivelabs-broker/remotivelabs/broker/generated/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/python/remotivelabs-broker/remotivelabs/broker/py.typed b/python/remotivelabs-broker/remotivelabs/broker/py.typed new file mode 100644 index 0000000..e69de29 diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py b/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py index 19fd0cb..1c05a08 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/__init__.py @@ -17,44 +17,42 @@ - `remotivelabs.broker.sync.traffic_api_pb2`. - `remotivelabs.broker.sync.traffic_api_pb2_grpc`. -For an example on how to use these we recommend looking at the samples for this library. Which is available at the repository remotiveLabs samples: +For an example on how to use these we recommend looking at the samples for this library. +Which is available at the repository remotiveLabs samples: Link: . 
""" -from ..generated.sync import common_pb2 -from ..generated.sync import common_pb2_grpc -from ..generated.sync import diagnostics_api_pb2 -from ..generated.sync import diagnostics_api_pb2_grpc -from ..generated.sync import functional_api_pb2 -from ..generated.sync import functional_api_pb2_grpc -from ..generated.sync import network_api_pb2 -from ..generated.sync import network_api_pb2_grpc -from ..generated.sync import system_api_pb2 -from ..generated.sync import system_api_pb2_grpc -from ..generated.sync import traffic_api_pb2 -from ..generated.sync import traffic_api_pb2_grpc - +from ..generated.sync import ( + common_pb2, # noqa: F401 + common_pb2_grpc, # noqa: F401 + diagnostics_api_pb2, # noqa: F401 + diagnostics_api_pb2_grpc, # noqa: F401 + functional_api_pb2, # noqa: F401 + functional_api_pb2_grpc, # noqa: F401 + network_api_pb2, # noqa: F401 + network_api_pb2_grpc, # noqa: F401 + system_api_pb2, # noqa: F401 + system_api_pb2_grpc, # noqa: F401 + traffic_api_pb2, # noqa: F401 + traffic_api_pb2_grpc, # noqa: F401 +) +from .client import BrokerException, Client, SignalIdentifier, SignalsInFrame, SignalValue +from .helper import ( + act_on_scripted_signal, + act_on_signal, + check_license, + create_channel, + download_file, + generate_data, + get_sha256, + printer, + publish_signals, + reload_configuration, + upload_file, + upload_folder, +) from .signalcreator import SignalCreator -from .client import Client -from .client import SignalValue -from .client import SignalsInFrame -from .client import SignalIdentifier -from .client import BrokerException -from .client import SignalsInFrame -from .helper import create_channel -from .helper import publish_signals -from .helper import printer -from .helper import get_sha256 -from .helper import generate_data -from .helper import upload_file -from .helper import download_file -from .helper import upload_folder -from .helper import reload_configuration -from .helper import check_license -from .helper import act_on_signal -from .helper import act_on_scripted_signal - __all__ = [ "SignalsInFrame", diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/client.py b/python/remotivelabs-broker/remotivelabs/broker/sync/client.py index fb6921e..261b8cd 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/client.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/client.py @@ -1,17 +1,17 @@ -#!/usr/bin/env python3 +from __future__ import annotations import binascii import json import queue from threading import Thread -from typing import Union, Callable, List, Iterable +from typing import Callable, Iterable, List, Optional, Union + +import grpc -from . import SignalCreator -from . import helper as br from ..generated.sync import network_api_pb2 as network_api -from ..generated.sync import network_api_pb2_grpc -from ..generated.sync import system_api_pb2_grpc -from ..generated.sync import traffic_api_pb2_grpc +from ..generated.sync import network_api_pb2_grpc, system_api_pb2_grpc, traffic_api_pb2_grpc +from . 
import helper as br +from .signalcreator import SignalCreator class SignalValue: @@ -49,20 +49,18 @@ def is_raw(self) -> bool: def get_raw(self) -> Union[bytes, None]: if self.is_raw(): return self.signal.raw - else: - return None + return None def __get_value(self) -> Union[str, int, float, bool, None]: if self.signal.raw != b"": return "0x" + binascii.hexlify(self.signal.raw).decode("ascii") - elif self.signal.HasField("integer"): + if self.signal.HasField("integer"): return self.signal.integer - elif self.signal.HasField("double"): + if self.signal.HasField("double"): return self.signal.double - elif self.signal.HasField("arbitration"): + if self.signal.HasField("arbitration"): return self.signal.arbitration - else: - return None + return None def timestamp_us(self): return self.signal.timestamp @@ -80,8 +78,7 @@ def __get_with_ensured_type(self, t: type): v = self.__get_value() if isinstance(v, t): return v - else: - raise BrokerException(f"{v} was not expected type '{t}' but got '{type(v)}'") + raise BrokerException(f"{v} was not expected type '{t}' but got '{type(v)}'") def float_value(self): return self.__get_with_ensured_type(float) @@ -97,15 +94,14 @@ def bytes_value(self): def as_dict(self): return { - 'timestamp_us': self.timestamp_us(), - 'name': self.name(), - 'namespace': self.namespace(), - 'value': self.value() + "timestamp_us": self.timestamp_us(), + "name": self.name(), + "namespace": self.namespace(), + "value": self.value(), } class SignalsInFrame(Iterable): - def __init__(self, signals: List[SignalValue]): self.signals = signals self.index = 0 @@ -116,6 +112,7 @@ def __iter__(self): def __next__(self): try: result = self.signals[self.index] + # pylint: disable=raise-missing-from except IndexError: raise StopIteration self.index += 1 @@ -133,23 +130,19 @@ class BrokerException(Exception): class Client: - def __init__(self, client_id: str = "broker_client"): - - self._signal_creator = None - self._traffic_stub = None - self._system_stub = None - self._network_stub = None - self._intercept_channel = None + self._signal_creator: SignalCreator + self._traffic_stub: traffic_api_pb2_grpc.TrafficServiceStub + self._system_stub: system_api_pb2_grpc.SystemServiceStub + self._network_stub: network_api_pb2_grpc.NetworkServiceStub + self._intercept_channel: grpc.Channel self.client_id = client_id - self.url = None - self.api_key = None + self.url: Optional[str] = None + self.api_key: Optional[str] = None self.on_connect: Union[Callable[[Client], None], None] = None self.on_signals: Union[Callable[[SignalsInFrame], None], None] = None - def connect(self, - url: str, - api_key: Union[str, None] = None): + def connect(self, url: str, api_key: Union[str, None] = None): self.url = url self.api_key = api_key if url.startswith("https"): @@ -166,8 +159,9 @@ def connect(self, if self.on_connect is not None: self.on_connect(self) - def _validate_and_get_subscribed_signals(self, subscribed_namespaces: List[str], subscribed_signals: List[str]) \ - -> List[SignalIdentifier]: + def _validate_and_get_subscribed_signals( + self, subscribed_namespaces: List[str], subscribed_signals: List[str] + ) -> List[SignalIdentifier]: # Since we cannot know which List[signals] belongs to which namespace we need to fetch # all signals from the broker and find the proper signal with namespace. 
Finally, we # also filter out namespaces that we do not need since we might have duplicated signal names @@ -175,7 +169,12 @@ def _validate_and_get_subscribed_signals(self, subscribed_namespaces: List[str], # Begin def verify_namespace(available_signal: SignalIdentifier): - return list(filter(lambda namespace: available_signal.namespace == namespace, subscribed_namespaces)) + return list( + filter( + lambda namespace: available_signal.namespace == namespace, + subscribed_namespaces, + ) + ) def find_subscribed_signal(available_signal: SignalIdentifier): return list(filter(lambda s: available_signal.name == s, subscribed_signals)) @@ -184,36 +183,34 @@ def find_subscribed_signal(available_signal: SignalIdentifier): signals_to_subscribe_to: List[SignalIdentifier] = list(filter(find_subscribed_signal, available_signals)) # Check if subscription is done on signal that is not in any of these namespaces - signals_subscribed_to_but_does_not_exist = \ - set(subscribed_signals) - set(map(lambda s: s.name, signals_to_subscribe_to)) + signals_subscribed_to_but_does_not_exist = set(subscribed_signals) - set(map(lambda s: s.name, signals_to_subscribe_to)) if len(signals_subscribed_to_but_does_not_exist) > 0: - raise BrokerException(f"One or more signals you subscribed to does not exist " - f", {signals_subscribed_to_but_does_not_exist}") + raise BrokerException(f"One or more signals you subscribed to does not exist " f", {signals_subscribed_to_but_does_not_exist}") return list(map(lambda s: SignalIdentifier(s.name, s.namespace), signals_to_subscribe_to)) - def subscribe(self, - signal_names: List[str], - namespaces: List[str], - on_signals: Callable[[SignalsInFrame], None] = None, - changed_values_only: bool = True): - + def subscribe( + self, + signal_names: List[str], + namespaces: List[str], + on_signals: Callable[[SignalsInFrame], None], + changed_values_only: bool = True, + ): if on_signals is None and self.on_signals is None: raise BrokerException( "You have not specified global client.on_signals nor client.subscribe(on_signals=callback), " - "or you are invoking subscribe() before client.on_signals which is not allowed") + "or you are invoking subscribe() before client.on_signals which is not allowed" + ) client_id = br.common_pb2.ClientId(id=self.client_id) - signals_to_subscribe_to: List[SignalIdentifier] = self._validate_and_get_subscribed_signals( - namespaces, - signal_names) + signals_to_subscribe_to: List[SignalIdentifier] = self._validate_and_get_subscribed_signals(namespaces, signal_names) def to_protobuf_signal(s: SignalIdentifier): return self._signal_creator.signal(s.name, s.namespace) signals_to_subscribe_on = list(map(to_protobuf_signal, signals_to_subscribe_to)) - wait_for_subscription_queue = queue.Queue() + wait_for_subscription_queue: queue.Queue = queue.Queue() Thread( target=br.act_on_signal, args=( @@ -222,7 +219,7 @@ def to_protobuf_signal(s: SignalIdentifier): signals_to_subscribe_on, changed_values_only, # True: only report when signal changes lambda frame: self._on_signals(frame, on_signals), - lambda sub: (wait_for_subscription_queue.put((self.client_id, sub))) + lambda sub: (wait_for_subscription_queue.put((self.client_id, sub))), ), ).start() client_id, subscription = wait_for_subscription_queue.get() @@ -233,20 +230,21 @@ def _on_signals(self, signals_in_frame: network_api.Signals, callback): Updates "local" callback or global on_signals callback if local callback is None """ if callback is not None: - callback(SignalsInFrame(list(map(lambda s: SignalValue(s), 
signals_in_frame)))) + # pylint: disable=unnecessary-lambda + callback(SignalsInFrame(list(map(lambda s: SignalValue(s), signals_in_frame)))) # type: ignore[call-overload] elif self.on_signals is not None: - self.on_signals(SignalsInFrame(list(map(lambda s: SignalValue(s), signals_in_frame)))) + self.on_signals(SignalsInFrame(list(map(SignalValue)))) # type: ignore[call-overload] def list_signal_names(self) -> List[SignalIdentifier]: # Lists available signals configuration = self._system_stub.GetConfiguration(br.common_pb2.Empty()) signal_names: List[SignalIdentifier] = [] - for networkInfo in configuration.networkInfo: - res = self._system_stub.ListSignals(networkInfo.namespace) + for network_info in configuration.networkInfo: + res = self._system_stub.ListSignals(network_info.namespace) for finfo in res.frame: # f: br.common_pb2.FrameInfo = finfo - signal_names.append(SignalIdentifier(finfo.signalInfo.id.name, networkInfo.namespace.name)) + signal_names.append(SignalIdentifier(finfo.signalInfo.id.name, network_info.namespace.name)) for sinfo in finfo.childInfo: - signal_names.append(SignalIdentifier(sinfo.id.name, networkInfo.namespace.name)) + signal_names.append(SignalIdentifier(sinfo.id.name, network_info.namespace.name)) return signal_names diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py b/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py index 4a87c82..95e419f 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/helper.py @@ -1,22 +1,20 @@ -from ..generated.sync import common_pb2 -from ..generated.sync import network_api_pb2 -from ..generated.sync import system_api_pb2 -from .. import log -import remotivelabs.broker.sync as br -import os +from __future__ import annotations -import base64 -import grpc import hashlib import itertools import ntpath +import os import posixpath - from glob import glob -from grpc_interceptor import ClientCallDetails, ClientInterceptor -from typing import Any, Callable, Sequence, Generator, Optional +from typing import Any, Callable, Generator, Optional, Sequence from urllib.parse import urlparse +import grpc +from grpc_interceptor import ClientCallDetails, ClientInterceptor + +from .. import log +from ..generated.sync import common_pb2, network_api_pb2, network_api_pb2_grpc, system_api_pb2, system_api_pb2_grpc + class HeaderInterceptor(ClientInterceptor): def __init__(self, header_dict): @@ -40,20 +38,7 @@ def intercept( return method(request_or_iterator, new_details) -def create_channel(url: str, x_api_key: Optional[str] = None) -> grpc.intercept_channel: - """ - Create communication channels for gRPC calls. - - :param url: URL to broker - :param x_api_key: API key used with RemotiveBroker running in cloud (deprecated). - :param authorization_token: Access token replacing api-keys moving forward. - :return: gRPC channel - """ - return create_channel(url, x_api_key, None) - - -def create_channel(url: str, x_api_key: Optional[str] = None, - authorization_token: Optional[str] = None) -> grpc.intercept_channel: +def create_channel(url: str, x_api_key: Optional[str] = None, authorization_token: Optional[str] = None) -> grpc.Channel: """ Create communication channels for gRPC calls. 
@@ -63,31 +48,33 @@ def create_channel(url: str, x_api_key: Optional[str] = None, :return: gRPC channel """ - url = urlparse(url) + parsed_url = urlparse(url) + if parsed_url.hostname is None: + raise BaseException() - if url.scheme == "https": - creds = grpc.ssl_channel_credentials( - root_certificates=None, private_key=None, certificate_chain=None - ) - channel = grpc.secure_channel( - url.hostname + ":" + str(url.port or "443"), creds - ) + if parsed_url.scheme == "https": + creds = grpc.ssl_channel_credentials(root_certificates=None, private_key=None, certificate_chain=None) + channel = grpc.secure_channel(parsed_url.hostname + ":" + str(parsed_url.port or "443"), creds) else: - addr = url.hostname + ":" + str(url.port or "50051") + addr = parsed_url.hostname + ":" + str(parsed_url.port or "50051") channel = grpc.insecure_channel(addr) if x_api_key is None and authorization_token is None: return channel - elif x_api_key is not None: - return grpc.intercept_channel( - channel, HeaderInterceptor({"x-api-key": x_api_key}) - ) - else: - # Adding both x-api-key (old) and authorization header for compatibility - return grpc.intercept_channel( - channel, HeaderInterceptor({"x-api-key": authorization_token, - "authorization": f"Bearer {authorization_token}"}) - ) + + if x_api_key is not None: + return grpc.intercept_channel(channel, HeaderInterceptor({"x-api-key": x_api_key})) + + # Adding both x-api-key (old) and authorization header for compatibility + return grpc.intercept_channel( + channel, + HeaderInterceptor( + { + "x-api-key": authorization_token, + "authorization": f"Bearer {authorization_token}", + } + ), + ) def publish_signals(client_id, stub, signals_with_payload, frequency: int = 0) -> None: @@ -107,7 +94,8 @@ def publish_signals(client_id, stub, signals_with_payload, frequency: int = 0) - try: stub.PublishSignals(publisher_info) - except grpc._channel._Rendezvous as err: + # pylint: disable=protected-access + except grpc._channel._Rendezvous as err: # type:ignore[attr-defined] log.error(err) @@ -119,11 +107,7 @@ def printer(signals: Sequence[common_pb2.SignalId]) -> None: """ for signal in signals: - log.info( - "{} {} {}".format( - signal.id.name, signal.id.namespace.name, get_value(signal) - ) - ) + log.info(f"{signal} {signal.namespace.name}") def get_sha256(path: str) -> str: @@ -134,22 +118,17 @@ def get_sha256(path: str) -> str: :rtype int: """ - f = open(path, "rb") - bytes = f.read() # read entire file as bytes - readable_hash = hashlib.sha256(bytes).hexdigest() - f.close() + with open(path, "rb") as f: + b = f.read() # read entire file as bytes + readable_hash = hashlib.sha256(b).hexdigest() return readable_hash -def generate_data( - file, dest_path, chunk_size, sha256 -) -> Generator[system_api_pb2.FileUploadRequest, None, None]: +def generate_data(file, dest_path, chunk_size, sha256) -> Generator[system_api_pb2.FileUploadRequest, None, None]: for x in itertools.count(start=0): if x == 0: - fileDescription = system_api_pb2.FileDescription( - sha256=sha256, path=dest_path - ) - yield system_api_pb2.FileUploadRequest(fileDescription=fileDescription) + file_description = system_api_pb2.FileDescription(sha256=sha256, path=dest_path) + yield system_api_pb2.FileUploadRequest(fileDescription=file_description) else: buf = file.read(chunk_size) if not buf: @@ -157,9 +136,7 @@ def generate_data( yield system_api_pb2.FileUploadRequest(chunk=buf) -def upload_file( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str -) -> None: +def 
upload_file(system_stub: system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str) -> None: """ Upload single file to internal storage on broker. @@ -169,23 +146,16 @@ def upload_file( """ sha256 = get_sha256(path) - log.debug("SHA256 for file {}: {}".format(path, sha256)) - file = open(path, "rb") - - # make sure path is unix style (necessary for windows, and does no harm om - # linux) - upload_iterator = generate_data( - file, dest_path.replace(ntpath.sep, posixpath.sep), 1000000, sha256 - ) - response = system_stub.UploadFile( - upload_iterator, compression=grpc.Compression.Gzip - ) - log.debug("Uploaded {} with response {}".format(path, response)) + log.debug(f"SHA256 for file {path}: {sha256}") + with open(path, "rb") as file: + # make sure path is unix style (necessary for windows, and does no harm om + # linux) + upload_iterator = generate_data(file, dest_path.replace(ntpath.sep, posixpath.sep), 1000000, sha256) + response = system_stub.UploadFile(upload_iterator, compression=grpc.Compression.Gzip) + log.debug(f"Uploaded {path} with response {response}") -def download_file( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str -) -> None: +def download_file(system_stub: system_api_pb2_grpc.SystemServiceStub, path: str, dest_path: str) -> None: """ Download file from Broker remote storage. @@ -194,23 +164,15 @@ def download_file( :param dest_path: Path to file in local file system """ - file = open(dest_path, "wb") - for response in system_stub.BatchDownloadFiles( - system_api_pb2.FileDescriptions( - fileDescriptions=[system_api_pb2.FileDescription(path=path.replace(ntpath.sep, - posixpath.sep))] - ) - ): - assert not response.HasField("errorMessage"), ( - "Error uploading file, message is: %s" % response.errorMessage - ) - file.write(response.chunk) - file.close() + with open(dest_path, "wb") as file: + for response in system_stub.BatchDownloadFiles( + system_api_pb2.FileDescriptions(fileDescriptions=[system_api_pb2.FileDescription(path=path.replace(ntpath.sep, posixpath.sep))]) + ): + assert not response.HasField("errorMessage"), f"Error uploading file, message is: {response.errorMessage}" + file.write(response.chunk) -def upload_folder( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, folder: str -) -> None: +def upload_folder(system_stub: system_api_pb2_grpc.SystemServiceStub, folder: str) -> None: """ Upload directory and its content to Broker remote storage. @@ -218,19 +180,14 @@ def upload_folder( :param folder: Path to directory in local file storage """ - files = [ - y - for x in os.walk(folder) - for y in glob(os.path.join(x[0], "*")) - if not os.path.isdir(y) - ] + files = [y for x in os.walk(folder) for y in glob(os.path.join(x[0], "*")) if not os.path.isdir(y)] assert len(files) != 0, "Specified upload folder is empty or does not exist" for file in files: upload_file(system_stub, file, file.replace(folder, "")) def reload_configuration( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, + system_stub: system_api_pb2_grpc.SystemServiceStub, ) -> None: """ Trigger reload of configuration on Broker. 
@@ -240,11 +197,11 @@ def reload_configuration( request = common_pb2.Empty() response = system_stub.ReloadConfiguration(request, timeout=60000) - log.debug("Reload configuration with response {}".format(response)) + log.debug(f"Reload configuration with response {response}") def check_license( - system_stub: br.system_api_pb2_grpc.SystemServiceStub, + system_stub: system_api_pb2_grpc.SystemServiceStub, ) -> None: """ Check license to Broker. Throws exception if failure. @@ -252,17 +209,15 @@ def check_license( :param system_stub: System gRPC channel stub """ status = system_stub.GetLicenseInfo(common_pb2.Empty()).status - assert status == system_api_pb2.LicenseStatus.VALID, ( - "Check your license, status is: %d" % status - ) + assert status == system_api_pb2.LicenseStatus.VALID, f"Check your license, status is: {status}" def act_on_signal( - client_id: br.common_pb2.ClientId, - network_stub: br.network_api_pb2_grpc.NetworkServiceStub, - sub_signals: Sequence[br.common_pb2.SignalId], + client_id: common_pb2.ClientId, + network_stub: network_api_pb2_grpc.NetworkServiceStub, + sub_signals: Sequence[common_pb2.SignalId], on_change: bool, - fun: Callable[[Sequence[br.network_api_pb2.Signal]], None], + fun: Callable[[Sequence[network_api_pb2.Signal]], None], on_subscribed: Optional[Callable[..., None]] = None, ) -> None: """ @@ -293,26 +248,27 @@ def act_on_signal( except grpc.RpcError as e: # Only try to cancel if cancel was not already attempted + # pylint: disable=no-member if e.code() != grpc.StatusCode.CANCELLED: try: subscripton.cancel() print("A gRPC error occurred:") print(e) - except grpc.RpcError as e2: + except grpc.RpcError: pass - - except grpc._channel._Rendezvous as err: + # pylint: disable=protected-access, bad-except-order + except grpc._channel._Rendezvous as err: # type:ignore[attr-defined] log.error(err) # reload, alternatively non-existing signal log.debug("Subscription terminated") def act_on_scripted_signal( - client_id: br.common_pb2.ClientId, - network_stub: br.network_api_pb2_grpc.NetworkServiceStub, + client_id: common_pb2.ClientId, + network_stub: network_api_pb2_grpc.NetworkServiceStub, script: bytes, on_change: bool, - fun: Callable[[Sequence[br.network_api_pb2.Signal]], None], + fun: Callable[[Sequence[network_api_pb2.Signal]], None], on_subscribed: Optional[Callable[..., None]] = None, ) -> None: """ @@ -334,9 +290,7 @@ def act_on_scripted_signal( onChange=on_change, ) try: - subscription = network_stub.SubscribeToSignalWithScript( - sub_info, timeout=None - ) + subscription = network_stub.SubscribeToSignalWithScript(sub_info, timeout=None) if on_subscribed: on_subscribed(subscription) log.debug("Waiting for signal...") @@ -348,10 +302,11 @@ def act_on_scripted_signal( subscription.cancel() print("A gRPC error occurred:") print(e) - except grpc.RpcError as e2: + except grpc.RpcError: pass - except grpc._channel._Rendezvous as err: + # pylint: disable=protected-access, bad-except-order + except grpc._channel._Rendezvous as err: # type:ignore[attr-defined] log.error(err) # reload, alternatively non-existing signal log.debug("Subscription terminated") diff --git a/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py b/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py index 30084a1..cae0558 100644 --- a/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py +++ b/python/remotivelabs-broker/remotivelabs/broker/sync/signalcreator.py @@ -1,80 +1,77 @@ -import sys +from __future__ import annotations import logging +from 
typing import Optional, Sequence, TypeVar
 
-from ..generated.sync import common_pb2
-from ..generated.sync import network_api_pb2
-
-from typing import Optional, TypeVar, Sequence
+from ..generated.sync import common_pb2, network_api_pb2
 
 T = TypeVar("T")
 
 _logger = logging.getLogger("remotivelabs.SignalCreator")
 
-_MSG_DUPLICATE = (
-    "Warning duplicated (namespace.signal): {}, to avoid"
-    + 'ambiguity set "short_names": false in your interfaces.json on {}'
-)
+_MSG_DUPLICATE = "Warning duplicated (namespace.signal): {}, to avoid" + 'ambiguity set "short_names": false in your interfaces.json on {}'
 
 
+# pylint: disable=C0103
 class MetaGetter:
     def __init__(self, proto_message):
         self.meta = proto_message
 
-    def _getDefault(field: T, default: Optional[T]) -> T:
+    def _getDefault(self, field: T, default: Optional[T]) -> T:
         if field is not None:
             return field
-        elif default:
+
+        if default:
             return default
-        else:
-            raise Exception("Failed to retrieve meta data field")
+
+        raise Exception("Failed to retrieve meta data field")
 
     def getDescription(self, default: Optional[str] = None) -> str:
         """Get protobuffer MetaData field description"""
-        return MetaGetter._getDefault(self.meta.description, default)
+        return self._getDefault(self.meta.description, default)
 
     def getUnit(self, default: Optional[str] = None) -> str:
         """Get protobuffer MetaData field unit"""
-        return MetaGetter._getDefault(self.meta.unit, default)
+        return self._getDefault(self.meta.unit, default)
 
     def getMax(self, default: Optional[float] = None) -> float:
         """Get protobuffer MetaData field max"""
-        return MetaGetter._getDefault(self.meta.max, default)
+        return self._getDefault(self.meta.max, default)
 
     def getMin(self, default: Optional[float] = None) -> float:
         """Get protobuffer MetaData field min"""
-        return MetaGetter._getDefault(self.meta.min, default)
+        return self._getDefault(self.meta.min, default)
 
     def getSize(self, default: Optional[int] = None) -> int:
         """Get protobuffer MetaData field size"""
-        return MetaGetter._getDefault(self.meta.size, default)
+        return self._getDefault(self.meta.size, default)
 
     def getIsRaw(self, default: Optional[bool] = None) -> bool:
         """Get protobuffer MetaData field isRaw"""
-        return MetaGetter._getDefault(self.meta.isRaw, default)
+        return self._getDefault(self.meta.isRaw, default)
 
     def getFactor(self, default: Optional[float] = None) -> float:
         """Get protobuffer MetaData field factor"""
-        return MetaGetter._getDefault(self.meta.factor, default)
+        return self._getDefault(self.meta.factor, default)
 
     def getOffset(self, default: Optional[float] = None) -> float:
         """Get protobuffer MetaData field offset"""
-        return MetaGetter._getDefault(self.meta.offset, default)
+        return self._getDefault(self.meta.offset, default)
 
     def getSenders(self, default: Optional[Sequence[str]] = None) -> Sequence[str]:
         """Get protobuffer MetaData field sender"""
-        return MetaGetter._getDefault(self.meta.sender, default)
+        return self._getDefault(self.meta.sender, default)
 
     def getReceivers(self, default: Optional[Sequence[str]] = None) -> Sequence[str]:
         """Get protobuffer MetaData field receiver"""
-        return MetaGetter._getDefault(self.meta.receiver, default)
+        return self._getDefault(self.meta.receiver, default)
 
     def getCycleTime(self, default: Optional[float] = None) -> float:
         """Get protobuffer MetaData field cycleTime"""
-        return MetaGetter._getDefault(self.meta.cycleTime, default)
+        return self._getDefault(self.meta.cycleTime, default)
 
     def getStartValue(self, default: Optional[float] = None) -> float:
         """Get protobuffer MetaData field startValue"""
-        return MetaGetter._getDefault(self.meta.startValue, default)
+        return self._getDefault(self.meta.startValue, default)
 
 
 class SignalCreator:
@@ -132,9 +129,7 @@ def signal(self, name: str, namespace_name: str) -> common_pb2.SignalId:
         """
         self.get_meta(name, namespace_name)  # Checks if the signal is present
-        return common_pb2.SignalId(
-            name=name, namespace=common_pb2.NameSpace(name=namespace_name)
-        )
+        return common_pb2.SignalId(name=name, namespace=common_pb2.NameSpace(name=namespace_name))
 
     def frames(self, namespace_name: str) -> Sequence[common_pb2.SignalId]:
         """
@@ -146,9 +141,7 @@ def frames(self, namespace_name: str) -> Sequence[common_pb2.SignalId]:
             all_frames.append(self.signal(finfo.signalInfo.id.name, namespace_name))
         return all_frames
 
-    def frame_by_signal(
-        self, name: str, namespace_name: str
-    ) -> Sequence[common_pb2.SignalId]:
+    def frame_by_signal(self, name: str, namespace_name: str) -> common_pb2.SignalId:
         """
         Get frame for the given signal.
 
@@ -161,13 +154,9 @@ def frame_by_signal(
             for sinfo in finfo.childInfo:
                 if sinfo.id.name == name:
                     return self.signal(finfo.signalInfo.id.name, namespace_name)
-        raise Exception(
-            f"signal not declared (namespace, signal): {namespace_name} {name}"
-        )
+        raise Exception(f"signal not declared (namespace, signal): {namespace_name} {name}")
 
-    def signals_in_frame(
-        self, name: str, namespace_name: str
-    ) -> Sequence[common_pb2.SignalId]:
+    def signals_in_frame(self, name: str, namespace_name: str) -> Sequence[common_pb2.SignalId]:
         """
         Get all signals residing in the frame.
 
@@ -183,14 +172,10 @@ def signals_in_frame(
             frame = finfo
             for sinfo in finfo.childInfo:
                 all_signals.append(self.signal(sinfo.id.name, namespace_name))
-        assert (
-            frame is not None
-        ), f"frame {name} does not exist in namespace {namespace_name}"
+        assert frame is not None, f"frame {name} does not exist in namespace {namespace_name}"
         return all_signals
 
-    def signal_with_payload(
-        self, name: str, namespace_name: str, value_pair, allow_malformed: bool = False
-    ) -> network_api_pb2.Signal:
+    def signal_with_payload(self, name: str, namespace_name: str, value_pair, allow_malformed: bool = False) -> network_api_pb2.Signal:
         """
         Create value with signal for writing.
 
@@ -209,25 +194,15 @@ def signal_with_payload(
             raise Exception(f"type must be one of: {types}")
         if key == "raw" and allow_malformed is False:
             expected = meta.getSize()
-            assert (
-                len(value) * 8 == expected
-            ), f"payload size missmatch, expected {expected/8} bytes"
+            assert len(value) * 8 == expected, f"payload size mismatch, expected {expected/8} bytes"
         elif key != "raw":
             # Check bounds if any
-            checkMin = meta.getMin()
-            if (checkMin is not None) and (value < checkMin):
-                _logger.warning(
-                    'Value below minimum value of {} for signal "{}"'.format(
-                        checkMin, name
-                    )
-                )
-            checkMax = meta.getMax()
-            if (checkMax is not None) and (value > checkMax):
-                _logger.warning(
-                    'Value above maximum value of {} for signal "{}"'.format(
-                        checkMax, name
-                    )
-                )
+            check_min = meta.getMin()
+            if (check_min is not None) and (value < check_min):
+                _logger.warning(f'Value below minimum value of {check_min} for signal "{name}"')
+            check_max = meta.getMax()
+            if (check_max is not None) and (value > check_max):
+                _logger.warning(f'Value above maximum value of {check_max} for signal "{name}"')
 
         params = {"id": signal, key: value}
         return network_api_pb2.Signal(**params)
diff --git a/python/remotivelabs-broker/tests/test_live.py b/python/remotivelabs-broker/tests/test_live.py
index 1aa1a51..a8ed9c0 100644
--- a/python/remotivelabs-broker/tests/test_live.py
+++ b/python/remotivelabs-broker/tests/test_live.py
@@ -1,6 +1,7 @@
-import google.protobuf
 import logging
+
 import pytest
+
 import remotivelabs.broker.sync as br
 
 # Warning these tests require a RemotiveBroker up and running
@@ -18,13 +19,15 @@ def __init__(self):
 
 # Setup broker with predefined settings
 @pytest.fixture
-def broker_connection():
+@pytest.fixture(name="broker_connection")
+def fixture_broker_connection():
     return Connection()
 
 
 # Setup broker configured for testing
 @pytest.fixture
-def broker_configured(broker_connection):
+@pytest.fixture(name="broker_configured")
+def fixture_broker_configured(broker_connection):
     br.upload_folder(broker_connection.system_stub, "tests/configuration_udp")
     br.reload_configuration(broker_connection.system_stub)
     return broker_connection
@@ -40,8 +43,8 @@ def test_meta_fields(broker_configured):
     sc = br.SignalCreator(broker_configured.system_stub)
     meta_speed = sc.get_meta("Speed", "ecu_A")
     parent_frame = sc.frame_by_signal("Speed", "ecu_A")
-    assert parent_frame.name == "PropulsionFrame"
-    meta_parent = sc.get_meta(parent_frame.name, "ecu_A")
+    assert parent_frame.name == "PropulsionFrame"  # pylint: disable=no-member
+    meta_parent = sc.get_meta(parent_frame.name, "ecu_A")  # pylint: disable=no-member
 
     assert meta_speed.getDescription() == "Current velocity"
     assert meta_speed.getMax() == 90.0
@@ -75,17 +78,11 @@ def test_min_max(broker_configured, caplog):
 
     # Publing a value below mininum
     sc.signal_with_payload("Speed", "ecu_A", ("double", -1.0))
-    assert (
-        caplog.records[0].message
-        == 'Value below minimum value of 0.0 for signal "Speed"'
-    )
+    assert caplog.records[0].message == 'Value below minimum value of 0.0 for signal "Speed"'
 
     # Publing a value above maximum
     sc.signal_with_payload("Speed", "ecu_A", ("double", 91.0))
-    assert (
-        caplog.records[1].message
-        == 'Value above maximum value of 90.0 for signal "Speed"'
-    )
+    assert caplog.records[1].message == 'Value above maximum value of 90.0 for signal "Speed"'
 
     assert len(caplog.records) == 2
diff --git a/python/remotivelabs-broker/tests/test_proto_types.py b/python/remotivelabs-broker/tests/test_proto_types.py
index 96f1e6e..49a882f 100644
--- a/python/remotivelabs-broker/tests/test_proto_types.py
+++ b/python/remotivelabs-broker/tests/test_proto_types.py
@@ -1,4 +1,3 @@
-import google.protobuf as pb
 import remotivelabs.broker.sync as br
 
 # Tests
@@ -6,4 +5,4 @@
 
 def test_empty():
     # Instanciate empty type
-    br.common_pb2.Empty
+    _ = br.common_pb2.Empty
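
For reference, a minimal usage sketch of the SignalCreator/MetaGetter API touched above, mirroring tests/test_live.py. The build_speed_signal helper and the bare system_stub argument are illustrative assumptions (the tests obtain their stub from the Connection helper class), and the "ecu_A"/"Speed" names come from the tests' configuration_udp setup:

    import logging

    import remotivelabs.broker.sync as br

    logging.basicConfig(level=logging.WARNING)


    def build_speed_signal(system_stub, value: float):
        """Illustrative helper, not part of the library."""
        sc = br.SignalCreator(system_stub)

        # MetaGetter getters return the MetaData field when it is set and fall
        # back to the supplied default; they raise only when neither is available.
        meta = sc.get_meta("Speed", "ecu_A")
        print(meta.getDescription(default=""), meta.getMin(default=0.0), meta.getMax(default=0.0))

        # Out-of-range values are still encoded, but a warning is logged via the
        # "remotivelabs.SignalCreator" logger (as asserted by test_min_max above).
        return sc.signal_with_payload("Speed", "ecu_A", ("double", value))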