diff --git a/.vscode/launch.json b/.vscode/launch.json
index 7a9220d2..c114b935 100644
--- a/.vscode/launch.json
+++ b/.vscode/launch.json
@@ -17,7 +17,8 @@
"SDV_MQTT_ADDRESS": "mqtt://localhost:1883",
"SDV_HVACSERVICE_ADDRESS": "grpc://localhost:50052",
"SDV_SEATSERVICE_ADDRESS": "grpc://localhost:50051",
- "SDV_VEHICLEDATABROKER_ADDRESS": "grpc://localhost:55555"
+ "SDV_VEHICLEDATABROKER_ADDRESS": "grpc://localhost:55555",
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "python"
}
},
{
@@ -35,7 +36,8 @@
"DAPR_HTTP_PORT": "3500",
"HVACSERVICE_DAPR_APP_ID": "hvacservice",
"SEATSERVICE_DAPR_APP_ID": "seatservice",
- "VEHICLEDATABROKER_DAPR_APP_ID": "vehicledatabroker"
+ "VEHICLEDATABROKER_DAPR_APP_ID": "vehicledatabroker",
+ "PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION": "python"
}
}
],
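The `PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION` entries added above select the pure-Python protobuf runtime for these debug configurations. A minimal sketch of the equivalent when launching from a plain shell instead of VS Code (assuming the same behaviour is wanted there):
```bash
# Sketch: mirror the env entry added to launch.json when running outside VS Code
export PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python
```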
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 2bb2f85e..b884020c 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -77,19 +77,19 @@ In this project, the [pip-tools](https://github.com/jazzband/pip-tools) are used
The required pip-based dependencies of this project are defined in the [setup.py](./setup.py). All runtime dependencies are listed in the `"install_requires"` while the development dependencies are listed in the `[dev]` section of the `"extras_require"`. In other words:
* The requirements that are defined in the `"install_requires"` are only installed when using the Vehicle app SDK as a runtime dependency for vehicle app development.
-* The requirements that are defined in the `"extras_require[dev]"` are installed for the contribution of the Vehicle app SDK development within the dev-container or in a dedicated virtual environments. This list consists of all the neccessary runtime, testing and development tools packages and need to be installed before start contributing to the project.
+* The requirements that are defined in the `"extras_require[dev]"` are installed for contributing to the Vehicle app SDK development within the dev-container or in a dedicated virtual environment. This list consists of all the necessary runtime, testing and development tool packages and needs to be installed before starting to contribute to the project.
The process for the dependency management of this project can be summarized as following:
-* The `pip-compile` tool will generate the [requirements.txt](./requirements.txt). By executing this tools, the `"requirements.txt"` file will be updated with all underlying dependencies. The command below shall be executed everytime a new python package is added to the project and/or to bump the package versions.
+* The `pip-compile` tool will generate the [requirements.txt](./requirements.txt). By executing this tool, the `"requirements.txt"` file will be updated with all underlying dependencies. The command below shall be executed every time a new Python package is added to the project and/or the package versions are bumped.
```bash
pip-compile --extra=dev
```
-* Please run the `pip-compile` with `-U` flag in order to force update all packages with the latest versions. However, you need to make sure when force updating all packages that everyting works as expected.
+* Please run `pip-compile` with the `-U` flag in order to force update all packages to their latest versions. However, when force updating all packages, you need to make sure that everything still works as expected.
```bash
pip-compile --extra=dev -U
```
-* Run `pip-sync` or `pip install` to install the required dependencies from the [requirements.txt](./requirements.txt) alternativly.
+* Alternatively, run `pip-sync` or `pip install` to install the required dependencies from the [requirements.txt](./requirements.txt).
```bash
pip-sync
```
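For orientation, the commands above combine into the following flow (a sketch, run from the repository root; the `pip install -r requirements.txt` form is an assumption of the intended `pip install` invocation):
```bash
# Sketch: typical flow after adding or bumping a dependency in setup.py
pip-compile --extra=dev            # regenerate requirements.txt with pinned versions
# pip-compile --extra=dev -U       # or: force update all packages to their latest versions
pip-sync                           # make the active environment match requirements.txt
# pip install -r requirements.txt  # alternative to pip-sync (assumed invocation)
```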
diff --git a/NOTICE-3RD-PARTY-CONTENT.md b/NOTICE-3RD-PARTY-CONTENT.md
index b45bfe62..c46af214 100644
--- a/NOTICE-3RD-PARTY-CONTENT.md
+++ b/NOTICE-3RD-PARTY-CONTENT.md
@@ -3,95 +3,93 @@
## Python
| Dependency | Version | License |
|:-----------|:-------:|--------:|
-|aiohttp|3.8.3|Apache 2.0|
+|aiohttp|3.8.5|Apache 2.0|
|aiosignal|1.3.1|Apache 2.0|
-|APScheduler|3.10.1|MIT|
-|async-timeout|4.0.2|Apache 2.0|
-|attrs|22.1.0|MIT|
+|APScheduler|3.10.4|MIT|
+|async-timeout|4.0.3|Apache 2.0|
+|attrs|23.1.0|MIT|
|bandit|1.7.5|Apache 2.0|
-|black|23.3.0|MIT|
+|black|23.7.0|MIT|
|build|0.10.0|MIT|
-|cachetools|5.3.0|MIT|
-|cfgv|3.3.1|MIT|
-|chardet|5.1.0|LGPL|
-|charset-normalizer|2.1.1|MIT|
-|click|8.1.3|New BSD|
+|cachetools|5.3.1|MIT|
+|cfgv|3.4.0|MIT|
+|chardet|5.2.0|LGPL|
+|charset-normalizer|3.2.0|MIT|
+|click|8.1.7|New BSD|
|cloudevents|1.9.0|Apache 2.0|
|colorama|0.4.6|BSD|
-|coverage|7.2.2|Apache 2.0|
-|dapr|1.8.3|Apache 2.0|
-|Deprecated|1.2.13|MIT|
+|coverage|7.3.0|Apache 2.0|
+|dapr|1.10.0|Apache 2.0|
+|Deprecated|1.2.14|MIT|
|deprecation|2.1.0|Apache 2.0|
-|distlib|0.3.6|Python Software Foundation License|
-|exceptiongroup|1.1.1|MIT|
-|filelock|3.10.7|The Unlicense (Unlicense)|
-|flake8|6.0.0|MIT|
-|flake8-bugbear|23.3.23|MIT|
-|frozenlist|1.3.3|Apache 2.0|
+|distlib|0.3.7|Python Software Foundation License|
+|exceptiongroup|1.1.3|MIT|
+|filelock|3.12.2|The Unlicense (Unlicense)|
+|flake8|6.1.0|MIT|
+|flake8-bugbear|23.7.10|MIT|
+|frozenlist|1.4.0|Apache 2.0|
|gitdb|4.0.10|BSD|
-|GitPython|3.1.31|BSD|
-|grpc-stubs|1.24.12.1|MIT|
-|grpcio|1.48.2|Apache 2.0|
-|grpcio-tools|1.48.2|Apache 2.0|
-|identify|2.5.22|MIT|
+|GitPython|3.1.32|BSD|
+|grpc-stubs|1.53.0.2|MIT|
+|grpcio|1.57.0|Apache 2.0|
+|grpcio-tools|1.57.0|Apache 2.0|
+|identify|2.5.27|MIT|
|idna|3.4|BSD|
|iniconfig|2.0.0|MIT|
|isort|5.12.0|MIT|
-|markdown-it-py|2.2.0|MIT|
+|markdown-it-py|3.0.0|MIT|
|mccabe|0.7.0|MIT|
|mdurl|0.1.2|MIT|
-|multidict|6.0.3|Apache 2.0|
-|mypy|1.1.1|MIT|
+|multidict|6.0.4|Apache 2.0|
+|mypy|1.5.1|MIT|
|mypy-extensions|1.0.0|MIT|
-|mypy-protobuf|3.3.0|Apache 2.0|
-|nodeenv|1.7.0|BSD|
-|opentelemetry-api|1.14.0|Apache 2.0|
-|opentelemetry-distro|0.35b0|Apache 2.0|
-|opentelemetry-instrumentation|0.35b0|Apache 2.0|
-|opentelemetry-instrumentation-logging|0.35b0|Apache 2.0|
-|opentelemetry-sdk|1.14.0|Apache 2.0|
-|opentelemetry-semantic-conventions|0.35b0|Apache 2.0|
-|packaging|23.0|Apache 2.0
BSD|
+|mypy-protobuf|3.4.0|Apache 2.0|
+|nodeenv|1.8.0|BSD|
+|opentelemetry-api|1.15.0|Apache 2.0|
+|opentelemetry-distro|0.36b0|Apache 2.0|
+|opentelemetry-instrumentation|0.36b0|Apache 2.0|
+|opentelemetry-instrumentation-logging|0.36b0|Apache 2.0|
+|opentelemetry-sdk|1.15.0|Apache 2.0|
+|opentelemetry-semantic-conventions|0.36b0|Apache 2.0|
+|packaging|23.1|Apache 2.0
BSD|
|paho-mqtt|1.6.1|OSI Approved|
-|pathspec|0.11.1|Mozilla Public License 2.0 (MPL 2.0)|
+|pathspec|0.11.2|Mozilla Public License 2.0 (MPL 2.0)|
|pbr|5.11.1|Apache 2.0|
|pip|23.2.1|MIT|
-|pip-tools|6.12.3|BSD|
-|platformdirs|3.2.0|MIT|
-|pluggy|1.0.0|MIT|
-|pre-commit|3.2.2|MIT|
-|protobuf|3.20.3|Google License|
-|pycodestyle|2.10.0|MIT|
+|pip-tools|7.3.0|BSD|
+|platformdirs|3.10.0|MIT|
+|pluggy|1.3.0|MIT|
+|pre-commit|3.3.3|MIT|
+|protobuf|4.21.12|Google License|
+|pycodestyle|2.11.0|MIT|
|pydocstyle|6.3.0|MIT|
-|pyflakes|3.0.1|MIT|
-|Pygments|2.14.0|Simplified BSD|
-|pyproject-api|1.5.1|MIT|
+|pyflakes|3.1.0|MIT|
+|Pygments|2.16.1|Simplified BSD|
+|pyproject-api|1.5.4|MIT|
|pyproject-hooks|1.0.0|MIT|
-|pytest|7.2.2|MIT|
-|pytest-asyncio|0.21.0|Apache 2.0|
-|pytest-cov|4.0.0|MIT|
+|pytest|7.4.0|MIT|
+|pytest-asyncio|0.21.1|Apache 2.0|
+|pytest-cov|4.1.0|MIT|
|python-dateutil|2.8.2|Apache 2.0
BSD|
|pytz|2023.3|MIT|
-|pytz-deprecation-shim|0.1.0.post0|Apache 2.0|
-|PyYAML|6.0|MIT|
-|rich|13.3.3|MIT|
+|PyYAML|6.0.1|MIT|
+|rich|13.5.2|MIT|
|setuptools|58.1.0|MIT|
|six|1.16.0|MIT|
|smmap|5.0.0|BSD|
|snowballstemmer|2.2.0|New BSD|
-|stevedore|5.0.0|Apache 2.0|
+|stevedore|5.1.0|Apache 2.0|
|tomli|2.0.1|MIT|
-|tox|4.4.11|MIT|
-|types-Deprecated|1.2.9.2|Apache 2.0|
-|types-mock|5.0.0.6|Apache 2.0|
-|types-protobuf|4.22.0.2|Apache 2.0|
-|typing-extensions|4.4.0|Python Software Foundation License|
-|tzdata|2023.3|Apache 2.0|
-|tzlocal|4.3|MIT|
-|virtualenv|20.21.0|MIT|
-|wheel|0.40.0|MIT|
-|wrapt|1.14.1|BSD|
-|yarl|1.8.2|Apache 2.0|
+|tox|4.10.0|MIT|
+|types-Deprecated|1.2.9.3|Apache 2.0|
+|types-mock|5.1.0.1|Apache 2.0|
+|types-protobuf|4.24.0.1|Apache 2.0|
+|typing-extensions|4.7.1|Python Software Foundation License|
+|tzlocal|5.0.1|MIT|
+|virtualenv|20.24.3|MIT|
+|wheel|0.41.2|MIT|
+|wrapt|1.15.0|BSD|
+|yarl|1.9.2|Apache 2.0|
## Workflows
| Dependency | Version | License |
|:-----------|:-------:|--------:|
diff --git a/generate-grpc-stubs.sh b/generate-grpc-stubs.sh
index 04a3bd03..22553bd1 100755
--- a/generate-grpc-stubs.sh
+++ b/generate-grpc-stubs.sh
@@ -17,6 +17,8 @@ echo "#######################################################"
echo "### Generating gRPC stubs from proto files ###"
echo "#######################################################"
+set -x
+
ROOT_DIR=$( realpath "$( cd -- "$(dirname "$0")" >/dev/null 2>&1 ; pwd -P )" )
python3 -m grpc_tools.protoc -I ./sdv/proto --grpc_python_out=./sdv/proto --python_out=./sdv/proto --mypy_out=./sdv/proto ./sdv/proto/**/*.proto
diff --git a/requirements.txt b/requirements.txt
index c9197b1e..1e5d6c47 100755
--- a/requirements.txt
+++ b/requirements.txt
@@ -4,34 +4,33 @@
#
# pip-compile --extra=dev
#
-aiohttp==3.8.3
+aiohttp==3.8.5
# via dapr
aiosignal==1.3.1
# via aiohttp
-apscheduler==3.10.1
+apscheduler==3.10.4
# via sdv (setup.py)
-async-timeout==4.0.2
+async-timeout==4.0.3
# via aiohttp
-attrs==22.1.0
+attrs==23.1.0
# via
# aiohttp
# flake8-bugbear
- # pytest
bandit==1.7.5
# via sdv (setup.py)
-black==23.3.0
+black==23.7.0
# via sdv (setup.py)
build==0.10.0
# via pip-tools
-cachetools==5.3.0
+cachetools==5.3.1
# via tox
-cfgv==3.3.1
+cfgv==3.4.0
# via pre-commit
-chardet==5.1.0
+chardet==5.2.0
# via tox
-charset-normalizer==2.1.1
+charset-normalizer==3.2.0
# via aiohttp
-click==8.1.3
+click==8.1.7
# via
# black
# pip-tools
@@ -39,49 +38,49 @@ cloudevents==1.9.0
# via sdv (setup.py)
colorama==0.4.6
# via tox
-coverage[toml]==7.2.2
+coverage[toml]==7.3.0
# via pytest-cov
-dapr==1.8.3
+dapr==1.10.0
# via sdv (setup.py)
-deprecated==1.2.13
+deprecated==1.2.14
# via
# opentelemetry-api
# sdv (setup.py)
deprecation==2.1.0
# via cloudevents
-distlib==0.3.6
+distlib==0.3.7
# via virtualenv
-exceptiongroup==1.1.1
+exceptiongroup==1.1.3
# via pytest
-filelock==3.10.7
+filelock==3.12.2
# via
# tox
# virtualenv
-flake8==6.0.0
+flake8==6.1.0
# via
# flake8-bugbear
# sdv (setup.py)
-flake8-bugbear==23.3.23
+flake8-bugbear==23.7.10
# via sdv (setup.py)
-frozenlist==1.3.3
+frozenlist==1.4.0
# via
# aiohttp
# aiosignal
gitdb==4.0.10
# via gitpython
-gitpython==3.1.31
+gitpython==3.1.32
# via bandit
-grpc-stubs==1.24.12.1
+grpc-stubs==1.53.0.2
# via sdv (setup.py)
-grpcio==1.48.2
+grpcio==1.57.0
# via
# dapr
# grpc-stubs
# grpcio-tools
# sdv (setup.py)
-grpcio-tools==1.48.2
+grpcio-tools==1.57.0
# via sdv (setup.py)
-identify==2.5.22
+identify==2.5.27
# via pre-commit
idna==3.4
# via yarl
@@ -89,48 +88,48 @@ iniconfig==2.0.0
# via pytest
isort==5.12.0
# via sdv (setup.py)
-markdown-it-py==2.2.0
+markdown-it-py==3.0.0
# via rich
mccabe==0.7.0
# via flake8
mdurl==0.1.2
# via markdown-it-py
-multidict==6.0.3
+multidict==6.0.4
# via
# aiohttp
# yarl
-mypy==1.1.1
+mypy==1.5.1
# via sdv (setup.py)
mypy-extensions==1.0.0
# via
# black
# mypy
-mypy-protobuf==3.3.0
+mypy-protobuf==3.4.0
# via sdv (setup.py)
-nodeenv==1.7.0
+nodeenv==1.8.0
# via pre-commit
-opentelemetry-api==1.14.0
+opentelemetry-api==1.15.0
# via
# opentelemetry-distro
# opentelemetry-instrumentation
# opentelemetry-instrumentation-logging
# opentelemetry-sdk
# sdv (setup.py)
-opentelemetry-distro==0.35b0
+opentelemetry-distro==0.36b0
# via sdv (setup.py)
-opentelemetry-instrumentation==0.35b0
+opentelemetry-instrumentation==0.36b0
# via
# opentelemetry-distro
# opentelemetry-instrumentation-logging
-opentelemetry-instrumentation-logging==0.35b0
+opentelemetry-instrumentation-logging==0.36b0
# via sdv (setup.py)
-opentelemetry-sdk==1.14.0
+opentelemetry-sdk==1.15.0
# via
# opentelemetry-distro
# sdv (setup.py)
-opentelemetry-semantic-conventions==0.35b0
+opentelemetry-semantic-conventions==0.36b0
# via opentelemetry-sdk
-packaging==23.0
+packaging==23.1
# via
# black
# build
@@ -140,72 +139,69 @@ packaging==23.0
# tox
paho-mqtt==1.6.1
# via sdv (setup.py)
-pathspec==0.11.1
+pathspec==0.11.2
# via black
pbr==5.11.1
# via stevedore
-pip-tools==6.12.3
+pip-tools==7.3.0
# via sdv (setup.py)
-platformdirs==3.2.0
+platformdirs==3.10.0
# via
# black
# tox
# virtualenv
-pluggy==1.0.0
+pluggy==1.3.0
# via
# pytest
# tox
-pre-commit==3.2.2
+pre-commit==3.3.3
# via sdv (setup.py)
-protobuf==3.20.3
+protobuf==4.21.12
# via
# dapr
# grpcio-tools
# mypy-protobuf
# sdv (setup.py)
-pycodestyle==2.10.0
+pycodestyle==2.11.0
# via flake8
pydocstyle==6.3.0
# via sdv (setup.py)
-pyflakes==3.0.1
+pyflakes==3.1.0
# via flake8
-pygments==2.14.0
+pygments==2.16.1
# via rich
-pyproject-api==1.5.1
+pyproject-api==1.5.4
# via tox
pyproject-hooks==1.0.0
# via build
-pytest==7.2.2
+pytest==7.4.0
# via
# pytest-asyncio
# pytest-cov
# sdv (setup.py)
-pytest-asyncio==0.21.0
+pytest-asyncio==0.21.1
# via sdv (setup.py)
-pytest-cov==4.0.0
+pytest-cov==4.1.0
# via sdv (setup.py)
python-dateutil==2.8.2
# via dapr
pytz==2023.3
# via apscheduler
-pytz-deprecation-shim==0.1.0.post0
- # via tzlocal
-pyyaml==6.0
+pyyaml==6.0.1
# via
# bandit
# pre-commit
-rich==13.3.3
+rich==13.5.2
# via bandit
six==1.16.0
# via
# apscheduler
- # grpcio
# python-dateutil
smmap==5.0.0
# via gitdb
snowballstemmer==2.2.0
# via pydocstyle
-stevedore==5.0.0
+stevedore==5.1.0
# via bandit
tomli==2.0.1
# via
@@ -213,37 +209,37 @@ tomli==2.0.1
# build
# coverage
# mypy
+ # pip-tools
# pyproject-api
# pyproject-hooks
# pytest
# tox
-tox==4.4.11
+tox==4.10.0
# via sdv (setup.py)
-types-deprecated==1.2.9.2
+types-deprecated==1.2.9.3
# via sdv (setup.py)
-types-mock==5.0.0.6
+types-mock==5.1.0.1
# via sdv (setup.py)
-types-protobuf==4.22.0.2
+types-protobuf==4.24.0.1
# via mypy-protobuf
-typing-extensions==4.4.0
+typing-extensions==4.7.1
# via
+ # dapr
# mypy
# opentelemetry-sdk
-tzdata==2023.3
- # via pytz-deprecation-shim
-tzlocal==4.3
+tzlocal==5.0.1
# via apscheduler
-virtualenv==20.21.0
+virtualenv==20.24.3
# via
# pre-commit
# tox
-wheel==0.40.0
+wheel==0.41.2
# via pip-tools
-wrapt==1.14.1
+wrapt==1.15.0
# via
# deprecated
# opentelemetry-instrumentation
-yarl==1.8.2
+yarl==1.9.2
# via aiohttp
# The following packages are considered to be unsafe in a requirements file:
diff --git a/sdv/proto/broker_pb2.py b/sdv/proto/broker_pb2.py
index 65e066f1..0a8c0a8d 100644
--- a/sdv/proto/broker_pb2.py
+++ b/sdv/proto/broker_pb2.py
@@ -1,18 +1,4 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: sdv/databroker/v1/broker.proto
"""Generated protocol buffer code."""
diff --git a/sdv/proto/broker_pb2.pyi b/sdv/proto/broker_pb2.pyi
index dd2dd45d..14a7bb08 100644
--- a/sdv/proto/broker_pb2.pyi
+++ b/sdv/proto/broker_pb2.pyi
@@ -1,203 +1,230 @@
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
+Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
+
+This program and the accompanying materials are made available under the
+terms of the Apache License, Version 2.0 which is available at
+https://www.apache.org/licenses/LICENSE-2.0.
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+
+SPDX-License-Identifier: Apache-2.0
"""
import builtins
+import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
-import sdv.proto
-import typing
-import typing_extensions
+import sdv.proto.types_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+ import typing as typing_extensions
+else:
+ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class GetDatapointsRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
DATAPOINTS_FIELD_NUMBER: builtins.int
@property
- def datapoints(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]:
+ def datapoints(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""A list of requested data points."""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- datapoints: typing.Optional[typing.Iterable[typing.Text]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ...
+ datapoints: collections.abc.Iterable[builtins.str] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["datapoints", b"datapoints"]) -> None: ...
+
global___GetDatapointsRequest = GetDatapointsRequest
class GetDatapointsReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class DatapointsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
- key: typing.Text
+ key: builtins.str
@property
def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- key: typing.Text = ...,
- value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ key: builtins.str = ...,
+ value: sdv.databroker.v1.types_pb2.Datapoint | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
DATAPOINTS_FIELD_NUMBER: builtins.int
@property
- def datapoints(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]:
+ def datapoints(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, sdv.databroker.v1.types_pb2.Datapoint]:
"""Contains the values of the requested data points.
If a requested data point is not available, the corresponding Datapoint
will have the respective failure value set.
"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- datapoints: typing.Optional[typing.Mapping[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ...
+ datapoints: collections.abc.Mapping[builtins.str, sdv.databroker.v1.types_pb2.Datapoint] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["datapoints", b"datapoints"]) -> None: ...
+
global___GetDatapointsReply = GetDatapointsReply
class SetDatapointsRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class DatapointsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
- key: typing.Text
+ key: builtins.str
@property
def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- key: typing.Text = ...,
- value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ key: builtins.str = ...,
+ value: sdv.databroker.v1.types_pb2.Datapoint | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
DATAPOINTS_FIELD_NUMBER: builtins.int
@property
- def datapoints(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]:
+ def datapoints(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, sdv.databroker.v1.types_pb2.Datapoint]:
"""A map of data points to set"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- datapoints: typing.Optional[typing.Mapping[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ...
+ datapoints: collections.abc.Mapping[builtins.str, sdv.databroker.v1.types_pb2.Datapoint] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["datapoints", b"datapoints"]) -> None: ...
+
global___SetDatapointsRequest = SetDatapointsRequest
class SetDatapointsReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class ErrorsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
- key: typing.Text
+ key: builtins.str
value: sdv.databroker.v1.types_pb2.DatapointError.ValueType
- def __init__(self,
+ def __init__(
+ self,
*,
- key: typing.Text = ...,
+ key: builtins.str = ...,
value: sdv.databroker.v1.types_pb2.DatapointError.ValueType = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
ERRORS_FIELD_NUMBER: builtins.int
@property
- def errors(self) -> google.protobuf.internal.containers.ScalarMap[typing.Text, sdv.databroker.v1.types_pb2.DatapointError.ValueType]:
+ def errors(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, sdv.databroker.v1.types_pb2.DatapointError.ValueType]:
"""A map of errors (if any)"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- errors: typing.Optional[typing.Mapping[typing.Text, sdv.databroker.v1.types_pb2.DatapointError.ValueType]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["errors",b"errors"]) -> None: ...
+ errors: collections.abc.Mapping[builtins.str, sdv.databroker.v1.types_pb2.DatapointError.ValueType] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["errors", b"errors"]) -> None: ...
+
global___SetDatapointsReply = SetDatapointsReply
class SubscribeRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
QUERY_FIELD_NUMBER: builtins.int
- query: typing.Text
+ query: builtins.str
"""Subscribe to a set of data points (or expressions) described
by the provided query.
The query syntax is a subset of SQL and is described in more
detail in the QUERY.md file.
"""
-
- def __init__(self,
+ def __init__(
+ self,
*,
- query: typing.Text = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["query",b"query"]) -> None: ...
+ query: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["query", b"query"]) -> None: ...
+
global___SubscribeRequest = SubscribeRequest
class SubscribeReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class FieldsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
- key: typing.Text
+ key: builtins.str
@property
def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- key: typing.Text = ...,
- value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ key: builtins.str = ...,
+ value: sdv.databroker.v1.types_pb2.Datapoint | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
FIELDS_FIELD_NUMBER: builtins.int
@property
- def fields(self) -> google.protobuf.internal.containers.MessageMap[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]:
+ def fields(self) -> google.protobuf.internal.containers.MessageMap[builtins.str, sdv.databroker.v1.types_pb2.Datapoint]:
"""Contains the fields specified by the query.
If a requested data point value is not available, the corresponding
Datapoint will have it's respective failure value set.
"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- fields: typing.Optional[typing.Mapping[typing.Text, sdv.databroker.v1.types_pb2.Datapoint]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["fields",b"fields"]) -> None: ...
+ fields: collections.abc.Mapping[builtins.str, sdv.databroker.v1.types_pb2.Datapoint] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["fields", b"fields"]) -> None: ...
+
global___SubscribeReply = SubscribeReply
class GetMetadataRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
NAMES_FIELD_NUMBER: builtins.int
@property
- def names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]:
+ def names(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]:
"""Request metadata for a list of data points referenced by their names.
e.g. "Vehicle.Cabin.Seat.Row1.Pos1.Position" or "Vehicle.Speed".
-
+
If no names are provided, metadata for all known data points will be
returned.
"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- names: typing.Optional[typing.Iterable[typing.Text]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["names",b"names"]) -> None: ...
+ names: collections.abc.Iterable[builtins.str] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["names", b"names"]) -> None: ...
+
global___GetMetadataRequest = GetMetadataRequest
class GetMetadataReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
LIST_FIELD_NUMBER: builtins.int
@property
def list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[sdv.databroker.v1.types_pb2.Metadata]:
@@ -205,10 +232,11 @@ class GetMetadataReply(google.protobuf.message.Message):
doesn't exist (i.e. not known to the Data Broker) the corresponding
Metadata isn't part of the returned list.
"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- list: typing.Optional[typing.Iterable[sdv.databroker.v1.types_pb2.Metadata]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["list",b"list"]) -> None: ...
+ list: collections.abc.Iterable[sdv.databroker.v1.types_pb2.Metadata] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["list", b"list"]) -> None: ...
+
global___GetMetadataReply = GetMetadataReply
diff --git a/sdv/proto/broker_pb2_grpc.py b/sdv/proto/broker_pb2_grpc.py
index 89a20836..d173d8ad 100644
--- a/sdv/proto/broker_pb2_grpc.py
+++ b/sdv/proto/broker_pb2_grpc.py
@@ -1,17 +1,3 @@
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
@@ -55,9 +41,9 @@ class BrokerServicer(object):
def GetDatapoints(self, request, context):
"""Request a set of datapoints (values)
-
+
Returns a list of requested data points.
-
+
InvalidArgument is returned if the request is malformed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -74,9 +60,9 @@ def SetDatapoints(self, request, context):
def Subscribe(self, request, context):
"""Subscribe to a set of data points or conditional expressions
using the Data Broker Query Syntax (described in QUERY.md)
-
+
Returns a stream of replies.
-
+
InvalidArgument is returned if the request is malformed.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
@@ -85,7 +71,7 @@ def Subscribe(self, request, context):
def GetMetadata(self, request, context):
"""Request the metadata of a set of datapoints
-
+
Returns metadata of the requested data points that exist.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
diff --git a/sdv/proto/collector_pb2.py b/sdv/proto/collector_pb2.py
index a527d12d..63388f0d 100644
--- a/sdv/proto/collector_pb2.py
+++ b/sdv/proto/collector_pb2.py
@@ -1,18 +1,4 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: sdv/databroker/v1/collector.proto
"""Generated protocol buffer code."""
diff --git a/sdv/proto/collector_pb2.pyi b/sdv/proto/collector_pb2.pyi
index 708f829e..143672bc 100644
--- a/sdv/proto/collector_pb2.pyi
+++ b/sdv/proto/collector_pb2.pyi
@@ -1,198 +1,229 @@
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
+Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
+
+This program and the accompanying materials are made available under the
+terms of the Apache License, Version 2.0 which is available at
+https://www.apache.org/licenses/LICENSE-2.0.
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+
+SPDX-License-Identifier: Apache-2.0
"""
import builtins
+import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.message
-import sdv.proto
-import typing
-import typing_extensions
+import sdv.proto.types_pb2
+import sys
+
+if sys.version_info >= (3, 8):
+ import typing as typing_extensions
+else:
+ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class UpdateDatapointsRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class DatapointsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
@property
def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ...
- def __init__(self,
+ def __init__(
+ self,
*,
key: builtins.int = ...,
- value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ value: sdv.databroker.v1.types_pb2.Datapoint | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
DATAPOINTS_FIELD_NUMBER: builtins.int
@property
def datapoints(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- datapoints: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ...
+ datapoints: collections.abc.Mapping[builtins.int, sdv.databroker.v1.types_pb2.Datapoint] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["datapoints", b"datapoints"]) -> None: ...
+
global___UpdateDatapointsRequest = UpdateDatapointsRequest
class UpdateDatapointsReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class ErrorsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
value: sdv.databroker.v1.types_pb2.DatapointError.ValueType
- def __init__(self,
+ def __init__(
+ self,
*,
key: builtins.int = ...,
value: sdv.databroker.v1.types_pb2.DatapointError.ValueType = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
ERRORS_FIELD_NUMBER: builtins.int
@property
def errors(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]:
"""If empty, everything went well"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- errors: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["errors",b"errors"]) -> None: ...
+ errors: collections.abc.Mapping[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["errors", b"errors"]) -> None: ...
+
global___UpdateDatapointsReply = UpdateDatapointsReply
class StreamDatapointsRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class DatapointsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
@property
def value(self) -> sdv.databroker.v1.types_pb2.Datapoint: ...
- def __init__(self,
+ def __init__(
+ self,
*,
key: builtins.int = ...,
- value: typing.Optional[sdv.databroker.v1.types_pb2.Datapoint] = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ value: sdv.databroker.v1.types_pb2.Datapoint | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["value", b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
DATAPOINTS_FIELD_NUMBER: builtins.int
@property
def datapoints(self) -> google.protobuf.internal.containers.MessageMap[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- datapoints: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.Datapoint]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["datapoints",b"datapoints"]) -> None: ...
+ datapoints: collections.abc.Mapping[builtins.int, sdv.databroker.v1.types_pb2.Datapoint] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["datapoints", b"datapoints"]) -> None: ...
+
global___StreamDatapointsRequest = StreamDatapointsRequest
class StreamDatapointsReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class ErrorsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
key: builtins.int
value: sdv.databroker.v1.types_pb2.DatapointError.ValueType
- def __init__(self,
+ def __init__(
+ self,
*,
key: builtins.int = ...,
value: sdv.databroker.v1.types_pb2.DatapointError.ValueType = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
ERRORS_FIELD_NUMBER: builtins.int
@property
def errors(self) -> google.protobuf.internal.containers.ScalarMap[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]:
"""If empty, everything went well"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- errors: typing.Optional[typing.Mapping[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["errors",b"errors"]) -> None: ...
+ errors: collections.abc.Mapping[builtins.int, sdv.databroker.v1.types_pb2.DatapointError.ValueType] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["errors", b"errors"]) -> None: ...
+
global___StreamDatapointsReply = StreamDatapointsReply
class RegisterDatapointsRequest(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
LIST_FIELD_NUMBER: builtins.int
@property
def list(self) -> google.protobuf.internal.containers.RepeatedCompositeFieldContainer[global___RegistrationMetadata]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- list: typing.Optional[typing.Iterable[global___RegistrationMetadata]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["list",b"list"]) -> None: ...
+ list: collections.abc.Iterable[global___RegistrationMetadata] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["list", b"list"]) -> None: ...
+
global___RegisterDatapointsRequest = RegisterDatapointsRequest
class RegistrationMetadata(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
NAME_FIELD_NUMBER: builtins.int
DATA_TYPE_FIELD_NUMBER: builtins.int
DESCRIPTION_FIELD_NUMBER: builtins.int
CHANGE_TYPE_FIELD_NUMBER: builtins.int
- name: typing.Text
+ name: builtins.str
"""Name of the data point
(e.g. "Vehicle.Cabin.Seat.Row1.Pos1.Position" or "Vehicle.Speed")
"""
-
data_type: sdv.databroker.v1.types_pb2.DataType.ValueType
- description: typing.Text
+ description: builtins.str
change_type: sdv.databroker.v1.types_pb2.ChangeType.ValueType
- def __init__(self,
+ def __init__(
+ self,
*,
- name: typing.Text = ...,
+ name: builtins.str = ...,
data_type: sdv.databroker.v1.types_pb2.DataType.ValueType = ...,
- description: typing.Text = ...,
+ description: builtins.str = ...,
change_type: sdv.databroker.v1.types_pb2.ChangeType.ValueType = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["change_type",b"change_type","data_type",b"data_type","description",b"description","name",b"name"]) -> None: ...
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["change_type", b"change_type", "data_type", b"data_type", "description", b"description", "name", b"name"]) -> None: ...
+
global___RegistrationMetadata = RegistrationMetadata
class RegisterDatapointsReply(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class ResultsEntry(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
KEY_FIELD_NUMBER: builtins.int
VALUE_FIELD_NUMBER: builtins.int
- key: typing.Text
+ key: builtins.str
value: builtins.int
- def __init__(self,
+ def __init__(
+ self,
*,
- key: typing.Text = ...,
+ key: builtins.str = ...,
value: builtins.int = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["key",b"key","value",b"value"]) -> None: ...
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["key", b"key", "value", b"value"]) -> None: ...
RESULTS_FIELD_NUMBER: builtins.int
@property
- def results(self) -> google.protobuf.internal.containers.ScalarMap[typing.Text, builtins.int]:
+ def results(self) -> google.protobuf.internal.containers.ScalarMap[builtins.str, builtins.int]:
"""Maps each data point name passed in RegisterDatapointsRequest to a data point id"""
- pass
- def __init__(self,
+ def __init__(
+ self,
*,
- results: typing.Optional[typing.Mapping[typing.Text, builtins.int]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["results",b"results"]) -> None: ...
+ results: collections.abc.Mapping[builtins.str, builtins.int] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["results", b"results"]) -> None: ...
+
global___RegisterDatapointsReply = RegisterDatapointsReply
diff --git a/sdv/proto/collector_pb2_grpc.py b/sdv/proto/collector_pb2_grpc.py
index 53351e2c..4867ded5 100644
--- a/sdv/proto/collector_pb2_grpc.py
+++ b/sdv/proto/collector_pb2_grpc.py
@@ -1,17 +1,3 @@
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
@@ -50,7 +36,7 @@ class CollectorServicer(object):
def RegisterDatapoints(self, request, context):
"""Register new datapoint (metadata)
-
+
If the registration of at least one of the passed data point fails, the overall registration
is rejected and the gRPC status code ABORTED is returned (to indicate the "aborted" registration).
The details, which data point(s) caused the failure and the reason, is passed in back in human-
@@ -70,14 +56,14 @@ def UpdateDatapoints(self, request, context):
"""Provide a set of updated datapoint values to the broker.
This is the unary equivalent of `StreamDatapoints` below and is better suited for cases
where the frequency of updates is rather low.
-
+
NOTE: The values provided in a single request are handled as a single update in the
data broker. This ensures that any clients requesting (or subscribing to) a set of
datapoints will get a consistent update, i.e. that either all values are updated or
none are.
-
+
Returns: any errors encountered updating the datapoints
-
+
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
@@ -87,14 +73,14 @@ def StreamDatapoints(self, request_iterator, context):
"""Provide a stream with updated datapoint values to the broker.
This is the streaming equivalent of `UpdateDatapoints` above and is better suited for
cases where the frequency of updates is high.
-
+
NOTE: The values provided in a single request are handled as a single update in the
data broker. This ensures that any clients requesting (or subscribing to) a set of
datapoints will get a consistent update, i.e. that either all values are updated or
none are.
-
+
Returns: any errors encountered updating the datapoints
-
+
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
diff --git a/sdv/proto/types_pb2.py b/sdv/proto/types_pb2.py
index eee2d94a..7cf9bc4f 100644
--- a/sdv/proto/types_pb2.py
+++ b/sdv/proto/types_pb2.py
@@ -1,18 +1,4 @@
# -*- coding: utf-8 -*-
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: sdv/databroker/v1/types.proto
"""Generated protocol buffer code."""
diff --git a/sdv/proto/types_pb2.pyi b/sdv/proto/types_pb2.pyi
index c4146aa4..4ee77351 100644
--- a/sdv/proto/types_pb2.pyi
+++ b/sdv/proto/types_pb2.pyi
@@ -1,36 +1,42 @@
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
"""
@generated by mypy-protobuf. Do not edit manually!
isort:skip_file
+Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
+
+This program and the accompanying materials are made available under the
+terms of the Apache License, Version 2.0 which is available at
+https://www.apache.org/licenses/LICENSE-2.0.
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+License for the specific language governing permissions and limitations
+under the License.
+
+SPDX-License-Identifier: Apache-2.0
"""
import builtins
+import collections.abc
import google.protobuf.descriptor
import google.protobuf.internal.containers
import google.protobuf.internal.enum_type_wrapper
import google.protobuf.message
import google.protobuf.timestamp_pb2
+import sys
import typing
-import typing_extensions
+
+if sys.version_info >= (3, 10):
+ import typing as typing_extensions
+else:
+ import typing_extensions
DESCRIPTOR: google.protobuf.descriptor.FileDescriptor
class _DataType:
- ValueType = typing.NewType('ValueType', builtins.int)
+ ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
-class _DataTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataType.ValueType], builtins.type):
+
+class _DataTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DataType.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
STRING: _DataType.ValueType # 0
BOOL: _DataType.ValueType # 1
@@ -58,13 +64,13 @@ class _DataTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumT
FLOAT_ARRAY: _DataType.ValueType # 30
DOUBLE_ARRAY: _DataType.ValueType # 31
TIMESTAMP_ARRAY: _DataType.ValueType # 32
+
class DataType(_DataType, metaclass=_DataTypeEnumTypeWrapper):
"""Data type of a signal
-
+
Protobuf doesn't support int8, int16, uint8 or uint16.
These are mapped to sint32 and uint32 respectively.
"""
- pass
STRING: DataType.ValueType # 0
BOOL: DataType.ValueType # 1
@@ -94,19 +100,19 @@ DOUBLE_ARRAY: DataType.ValueType # 31
TIMESTAMP_ARRAY: DataType.ValueType # 32
global___DataType = DataType
-
class _DatapointError:
- ValueType = typing.NewType('ValueType', builtins.int)
+ ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
-class _DatapointErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DatapointError.ValueType], builtins.type):
+
+class _DatapointErrorEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_DatapointError.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
UNKNOWN_DATAPOINT: _DatapointError.ValueType # 0
INVALID_TYPE: _DatapointError.ValueType # 1
ACCESS_DENIED: _DatapointError.ValueType # 2
INTERNAL_ERROR: _DatapointError.ValueType # 3
OUT_OF_BOUNDS: _DatapointError.ValueType # 4
-class DatapointError(_DatapointError, metaclass=_DatapointErrorEnumTypeWrapper):
- pass
+
+class DatapointError(_DatapointError, metaclass=_DatapointErrorEnumTypeWrapper): ...
UNKNOWN_DATAPOINT: DatapointError.ValueType # 0
INVALID_TYPE: DatapointError.ValueType # 1
@@ -115,177 +121,185 @@ INTERNAL_ERROR: DatapointError.ValueType # 3
OUT_OF_BOUNDS: DatapointError.ValueType # 4
global___DatapointError = DatapointError
-
class _ChangeType:
- ValueType = typing.NewType('ValueType', builtins.int)
+ ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
-class _ChangeTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ChangeType.ValueType], builtins.type):
+
+class _ChangeTypeEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[_ChangeType.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
STATIC: _ChangeType.ValueType # 0
"""Value never changes"""
-
ON_CHANGE: _ChangeType.ValueType # 1
"""Updates are provided every time the value changes (i.e."""
-
CONTINUOUS: _ChangeType.ValueType # 2
"""window is open / closed)
Value is updated continuously. Broker needs to tell
"""
-class ChangeType(_ChangeType, metaclass=_ChangeTypeEnumTypeWrapper):
- pass
+class ChangeType(_ChangeType, metaclass=_ChangeTypeEnumTypeWrapper): ...
STATIC: ChangeType.ValueType # 0
"""Value never changes"""
-
ON_CHANGE: ChangeType.ValueType # 1
"""Updates are provided every time the value changes (i.e."""
-
CONTINUOUS: ChangeType.ValueType # 2
"""window is open / closed)
Value is updated continuously. Broker needs to tell
"""
-
global___ChangeType = ChangeType
-
class StringArray(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
- def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[typing.Text]: ...
- def __init__(self,
+ def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.str]: ...
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[typing.Text]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.str] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___StringArray = StringArray
class BoolArray(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.bool]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.bool]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.bool] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___BoolArray = BoolArray
class Int32Array(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.int]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.int] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___Int32Array = Int32Array
class Int64Array(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.int]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.int] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___Int64Array = Int64Array
class Uint32Array(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.int]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.int] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___Uint32Array = Uint32Array
class Uint64Array(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.int]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.int]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.int] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___Uint64Array = Uint64Array
class FloatArray(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.float]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.float] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___FloatArray = FloatArray
class DoubleArray(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
VALUES_FIELD_NUMBER: builtins.int
@property
def values(self) -> google.protobuf.internal.containers.RepeatedScalarFieldContainer[builtins.float]: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- values: typing.Optional[typing.Iterable[builtins.float]] = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["values",b"values"]) -> None: ...
+ values: collections.abc.Iterable[builtins.float] | None = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["values", b"values"]) -> None: ...
+
global___DoubleArray = DoubleArray
class Datapoint(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
class _Failure:
- ValueType = typing.NewType('ValueType', builtins.int)
+ ValueType = typing.NewType("ValueType", builtins.int)
V: typing_extensions.TypeAlias = ValueType
- class _FailureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Datapoint._Failure.ValueType], builtins.type):
+
+ class _FailureEnumTypeWrapper(google.protobuf.internal.enum_type_wrapper._EnumTypeWrapper[Datapoint._Failure.ValueType], builtins.type): # noqa: F821
DESCRIPTOR: google.protobuf.descriptor.EnumDescriptor
INVALID_VALUE: Datapoint._Failure.ValueType # 0
"""The data point is known, but doesn't have a valid value"""
-
NOT_AVAILABLE: Datapoint._Failure.ValueType # 1
"""The data point is known, but no value is available"""
-
UNKNOWN_DATAPOINT: Datapoint._Failure.ValueType # 2
"""Unknown datapoint"""
-
ACCESS_DENIED: Datapoint._Failure.ValueType # 3
"""Access denied"""
-
INTERNAL_ERROR: Datapoint._Failure.ValueType # 4
"""Unexpected internal error"""
- class Failure(_Failure, metaclass=_FailureEnumTypeWrapper):
- pass
-
+ class Failure(_Failure, metaclass=_FailureEnumTypeWrapper): ...
INVALID_VALUE: Datapoint.Failure.ValueType # 0
"""The data point is known, but doesn't have a valid value"""
-
NOT_AVAILABLE: Datapoint.Failure.ValueType # 1
"""The data point is known, but no value is available"""
-
UNKNOWN_DATAPOINT: Datapoint.Failure.ValueType # 2
"""Unknown datapoint"""
-
ACCESS_DENIED: Datapoint.Failure.ValueType # 3
"""Access denied"""
-
INTERNAL_ERROR: Datapoint.Failure.ValueType # 4
"""Unexpected internal error"""
-
TIMESTAMP_FIELD_NUMBER: builtins.int
FAILURE_VALUE_FIELD_NUMBER: builtins.int
STRING_VALUE_FIELD_NUMBER: builtins.int
@@ -307,9 +321,8 @@ class Datapoint(google.protobuf.message.Message):
@property
def timestamp(self) -> google.protobuf.timestamp_pb2.Timestamp:
"""Timestamp of the value"""
- pass
failure_value: global___Datapoint.Failure.ValueType
- string_value: typing.Text
+ string_value: builtins.str
bool_value: builtins.bool
int32_value: builtins.int
int64_value: builtins.int
@@ -333,11 +346,12 @@ class Datapoint(google.protobuf.message.Message):
def float_array(self) -> global___FloatArray: ...
@property
def double_array(self) -> global___DoubleArray: ...
- def __init__(self,
+ def __init__(
+ self,
*,
- timestamp: typing.Optional[google.protobuf.timestamp_pb2.Timestamp] = ...,
+ timestamp: google.protobuf.timestamp_pb2.Timestamp | None = ...,
failure_value: global___Datapoint.Failure.ValueType = ...,
- string_value: typing.Text = ...,
+ string_value: builtins.str = ...,
bool_value: builtins.bool = ...,
int32_value: builtins.int = ...,
int64_value: builtins.int = ...,
@@ -345,22 +359,24 @@ class Datapoint(google.protobuf.message.Message):
uint64_value: builtins.int = ...,
float_value: builtins.float = ...,
double_value: builtins.float = ...,
- string_array: typing.Optional[global___StringArray] = ...,
- bool_array: typing.Optional[global___BoolArray] = ...,
- int32_array: typing.Optional[global___Int32Array] = ...,
- int64_array: typing.Optional[global___Int64Array] = ...,
- uint32_array: typing.Optional[global___Uint32Array] = ...,
- uint64_array: typing.Optional[global___Uint64Array] = ...,
- float_array: typing.Optional[global___FloatArray] = ...,
- double_array: typing.Optional[global___DoubleArray] = ...,
- ) -> None: ...
- def HasField(self, field_name: typing_extensions.Literal["bool_array",b"bool_array","bool_value",b"bool_value","double_array",b"double_array","double_value",b"double_value","failure_value",b"failure_value","float_array",b"float_array","float_value",b"float_value","int32_array",b"int32_array","int32_value",b"int32_value","int64_array",b"int64_array","int64_value",b"int64_value","string_array",b"string_array","string_value",b"string_value","timestamp",b"timestamp","uint32_array",b"uint32_array","uint32_value",b"uint32_value","uint64_array",b"uint64_array","uint64_value",b"uint64_value","value",b"value"]) -> builtins.bool: ...
- def ClearField(self, field_name: typing_extensions.Literal["bool_array",b"bool_array","bool_value",b"bool_value","double_array",b"double_array","double_value",b"double_value","failure_value",b"failure_value","float_array",b"float_array","float_value",b"float_value","int32_array",b"int32_array","int32_value",b"int32_value","int64_array",b"int64_array","int64_value",b"int64_value","string_array",b"string_array","string_value",b"string_value","timestamp",b"timestamp","uint32_array",b"uint32_array","uint32_value",b"uint32_value","uint64_array",b"uint64_array","uint64_value",b"uint64_value","value",b"value"]) -> None: ...
- def WhichOneof(self, oneof_group: typing_extensions.Literal["value",b"value"]) -> typing.Optional[typing_extensions.Literal["failure_value","string_value","bool_value","int32_value","int64_value","uint32_value","uint64_value","float_value","double_value","string_array","bool_array","int32_array","int64_array","uint32_array","uint64_array","float_array","double_array"]]: ...
+ string_array: global___StringArray | None = ...,
+ bool_array: global___BoolArray | None = ...,
+ int32_array: global___Int32Array | None = ...,
+ int64_array: global___Int64Array | None = ...,
+ uint32_array: global___Uint32Array | None = ...,
+ uint64_array: global___Uint64Array | None = ...,
+ float_array: global___FloatArray | None = ...,
+ double_array: global___DoubleArray | None = ...,
+ ) -> None: ...
+ def HasField(self, field_name: typing_extensions.Literal["bool_array", b"bool_array", "bool_value", b"bool_value", "double_array", b"double_array", "double_value", b"double_value", "failure_value", b"failure_value", "float_array", b"float_array", "float_value", b"float_value", "int32_array", b"int32_array", "int32_value", b"int32_value", "int64_array", b"int64_array", "int64_value", b"int64_value", "string_array", b"string_array", "string_value", b"string_value", "timestamp", b"timestamp", "uint32_array", b"uint32_array", "uint32_value", b"uint32_value", "uint64_array", b"uint64_array", "uint64_value", b"uint64_value", "value", b"value"]) -> builtins.bool: ...
+ def ClearField(self, field_name: typing_extensions.Literal["bool_array", b"bool_array", "bool_value", b"bool_value", "double_array", b"double_array", "double_value", b"double_value", "failure_value", b"failure_value", "float_array", b"float_array", "float_value", b"float_value", "int32_array", b"int32_array", "int32_value", b"int32_value", "int64_array", b"int64_array", "int64_value", b"int64_value", "string_array", b"string_array", "string_value", b"string_value", "timestamp", b"timestamp", "uint32_array", b"uint32_array", "uint32_value", b"uint32_value", "uint64_array", b"uint64_array", "uint64_value", b"uint64_value", "value", b"value"]) -> None: ...
+ def WhichOneof(self, oneof_group: typing_extensions.Literal["value", b"value"]) -> typing_extensions.Literal["failure_value", "string_value", "bool_value", "int32_value", "int64_value", "uint32_value", "uint64_value", "float_value", "double_value", "string_array", "bool_array", "int32_array", "int64_array", "uint32_array", "uint64_array", "float_array", "double_array"] | None: ...
+
global___Datapoint = Datapoint
class Metadata(google.protobuf.message.Message):
DESCRIPTOR: google.protobuf.descriptor.Descriptor
+
ID_FIELD_NUMBER: builtins.int
NAME_FIELD_NUMBER: builtins.int
DATA_TYPE_FIELD_NUMBER: builtins.int
@@ -370,20 +386,20 @@ class Metadata(google.protobuf.message.Message):
"""Id to be used in "get" and "subscribe" requests. Ids stay valid during
one power cycle, only.
"""
-
- name: typing.Text
+ name: builtins.str
data_type: global___DataType.ValueType
change_type: global___ChangeType.ValueType
"""CONTINUOUS or STATIC or ON_CHANGE"""
-
- description: typing.Text
- def __init__(self,
+ description: builtins.str
+ def __init__(
+ self,
*,
id: builtins.int = ...,
- name: typing.Text = ...,
+ name: builtins.str = ...,
data_type: global___DataType.ValueType = ...,
change_type: global___ChangeType.ValueType = ...,
- description: typing.Text = ...,
- ) -> None: ...
- def ClearField(self, field_name: typing_extensions.Literal["change_type",b"change_type","data_type",b"data_type","description",b"description","id",b"id","name",b"name"]) -> None: ...
+ description: builtins.str = ...,
+ ) -> None: ...
+ def ClearField(self, field_name: typing_extensions.Literal["change_type", b"change_type", "data_type", b"data_type", "description", b"description", "id", b"id", "name", b"name"]) -> None: ...
+
global___Metadata = Metadata
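
The regenerated type stubs above tighten the signatures of the array wrapper messages and of Datapoint's "value" oneof. A minimal usage sketch, assuming the matching runtime module is importable as sdv.proto.types_pb2 (an inferred path, mirroring sdv/proto/types_pb2_grpc.py below):

from google.protobuf import timestamp_pb2

from sdv.proto import types_pb2  # assumed import path for the generated module

# Scalar datapoint: setting float_value selects the "value" oneof.
ts = timestamp_pb2.Timestamp()
ts.GetCurrentTime()
scalar = types_pb2.Datapoint(timestamp=ts, float_value=21.5)
assert scalar.WhichOneof("value") == "float_value"

# Array datapoint: repeated scalars go through the wrapper messages typed above.
array = types_pb2.Datapoint(float_array=types_pb2.FloatArray(values=[1.0, 2.0, 3.0]))
assert array.WhichOneof("value") == "float_array"
print(list(array.float_array.values))
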
diff --git a/sdv/proto/types_pb2_grpc.py b/sdv/proto/types_pb2_grpc.py
index 66840c09..2daafffe 100644
--- a/sdv/proto/types_pb2_grpc.py
+++ b/sdv/proto/types_pb2_grpc.py
@@ -1,17 +1,3 @@
-# Copyright (c) 2022 Robert Bosch GmbH and Microsoft Corporation
-#
-# This program and the accompanying materials are made available under the
-# terms of the Apache License, Version 2.0 which is available at
-# https://www.apache.org/licenses/LICENSE-2.0.
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
-# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
-# License for the specific language governing permissions and limitations
-# under the License.
-#
-# SPDX-License-Identifier: Apache-2.0
-
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
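
types_pb2_grpc.py is emitted verbatim by the gRPC protocol compiler plugin ("DO NOT EDIT"), which is why the hand-added license header is dropped rather than maintained. For reference, a hedged sketch of how such stubs are typically regenerated with grpcio-tools and mypy-protobuf (both listed in the dev extras below); the proto location and output directories here are assumptions, not paths confirmed by this patch:

import pkg_resources
from grpc_tools import protoc

# grpcio-tools bundles the well-known .proto files (google/protobuf/timestamp.proto
# is needed by Datapoint.timestamp); add that directory to the include path.
well_known = pkg_resources.resource_filename("grpc_tools", "_proto")

protoc.main([
    "grpc_tools.protoc",
    f"-I{well_known}",
    "-Iproto",                       # assumed location of types.proto
    "--python_out=sdv/proto",        # emits types_pb2.py
    "--grpc_python_out=sdv/proto",   # emits types_pb2_grpc.py
    "--mypy_out=sdv/proto",          # emits types_pb2.pyi via the protoc-gen-mypy plugin
    "proto/types.proto",             # assumed proto path
])
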
diff --git a/setup.py b/setup.py
index b72bc1dc..82ce27d6 100644
--- a/setup.py
+++ b/setup.py
@@ -15,9 +15,9 @@
from setuptools import setup
requirements = [
- "grpcio>=1.44.0",
+ "grpcio>=1.56.0",
"protobuf>=3.19.4",
- "dapr>=1.6.0",
+ "dapr",
"paho-mqtt>=1.6.1",
"opentelemetry-distro<=0.36b0",
"opentelemetry-instrumentation-logging<=0.36b0",
@@ -31,14 +31,12 @@
# Runtime Packages
##########################################
"protobuf",
- "grpcio",
+ "grpcio>=1.56.0",
"dapr",
"cloudevents",
- # because of the restriction in dapr 1.8.3, remove when it fixed in dapr
- "grpcio-tools<=1.48.2",
+ "grpcio-tools",
"grpc-stubs",
- # because of the restriction in dapr 1.8.3, remove when it fixed in dapr
- "mypy-protobuf<=3.3.0",
+ "mypy-protobuf",
"apscheduler",
"Deprecated",
"types-Deprecated",
@@ -70,7 +68,7 @@
setup(
name="sdv",
- version="0.10.2",
+ version="0.11.0",
description="A Python SDK for Vehicle app",
long_description=long_description,
long_description_content_type="text/markdown",
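
With the version pins loosened, a quick sanity check after installing the package, assuming the resolver picked up the new minimums:

import google.protobuf
import grpc

print("grpcio:", grpc.__version__)               # expected >= 1.56.0
print("protobuf:", google.protobuf.__version__)  # expected >= 3.19.4
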