remotivelabs_cli-0.5.0a1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- remotivelabs/cli/__init__.py +0 -0
- remotivelabs/cli/api/cloud/tokens.py +62 -0
- remotivelabs/cli/broker/__init__.py +33 -0
- remotivelabs/cli/broker/defaults.py +1 -0
- remotivelabs/cli/broker/discovery.py +43 -0
- remotivelabs/cli/broker/export.py +92 -0
- remotivelabs/cli/broker/files.py +119 -0
- remotivelabs/cli/broker/lib/__about__.py +4 -0
- remotivelabs/cli/broker/lib/broker.py +625 -0
- remotivelabs/cli/broker/lib/client.py +224 -0
- remotivelabs/cli/broker/lib/helper.py +277 -0
- remotivelabs/cli/broker/lib/signalcreator.py +196 -0
- remotivelabs/cli/broker/license_flows.py +167 -0
- remotivelabs/cli/broker/licenses.py +98 -0
- remotivelabs/cli/broker/playback.py +117 -0
- remotivelabs/cli/broker/record.py +41 -0
- remotivelabs/cli/broker/recording_session/__init__.py +3 -0
- remotivelabs/cli/broker/recording_session/client.py +67 -0
- remotivelabs/cli/broker/recording_session/cmd.py +254 -0
- remotivelabs/cli/broker/recording_session/time.py +49 -0
- remotivelabs/cli/broker/scripting.py +129 -0
- remotivelabs/cli/broker/signals.py +220 -0
- remotivelabs/cli/broker/version.py +31 -0
- remotivelabs/cli/cloud/__init__.py +17 -0
- remotivelabs/cli/cloud/auth/__init__.py +3 -0
- remotivelabs/cli/cloud/auth/cmd.py +128 -0
- remotivelabs/cli/cloud/auth/login.py +283 -0
- remotivelabs/cli/cloud/auth_tokens.py +149 -0
- remotivelabs/cli/cloud/brokers.py +109 -0
- remotivelabs/cli/cloud/configs.py +109 -0
- remotivelabs/cli/cloud/licenses/__init__.py +0 -0
- remotivelabs/cli/cloud/licenses/cmd.py +14 -0
- remotivelabs/cli/cloud/organisations.py +112 -0
- remotivelabs/cli/cloud/projects.py +44 -0
- remotivelabs/cli/cloud/recordings.py +580 -0
- remotivelabs/cli/cloud/recordings_playback.py +274 -0
- remotivelabs/cli/cloud/resumable_upload.py +87 -0
- remotivelabs/cli/cloud/sample_recordings.py +25 -0
- remotivelabs/cli/cloud/service_account_tokens.py +62 -0
- remotivelabs/cli/cloud/service_accounts.py +72 -0
- remotivelabs/cli/cloud/storage/__init__.py +5 -0
- remotivelabs/cli/cloud/storage/cmd.py +76 -0
- remotivelabs/cli/cloud/storage/copy.py +86 -0
- remotivelabs/cli/cloud/storage/uri_or_path.py +45 -0
- remotivelabs/cli/cloud/uri.py +113 -0
- remotivelabs/cli/connect/__init__.py +0 -0
- remotivelabs/cli/connect/connect.py +118 -0
- remotivelabs/cli/connect/protopie/protopie.py +185 -0
- remotivelabs/cli/py.typed +0 -0
- remotivelabs/cli/remotive.py +123 -0
- remotivelabs/cli/settings/__init__.py +20 -0
- remotivelabs/cli/settings/config_file.py +113 -0
- remotivelabs/cli/settings/core.py +333 -0
- remotivelabs/cli/settings/migration/__init__.py +0 -0
- remotivelabs/cli/settings/migration/migrate_all_token_files.py +80 -0
- remotivelabs/cli/settings/migration/migrate_config_file.py +64 -0
- remotivelabs/cli/settings/migration/migrate_legacy_dirs.py +50 -0
- remotivelabs/cli/settings/migration/migrate_token_file.py +52 -0
- remotivelabs/cli/settings/migration/migration_tools.py +38 -0
- remotivelabs/cli/settings/state_file.py +67 -0
- remotivelabs/cli/settings/token_file.py +128 -0
- remotivelabs/cli/tools/__init__.py +0 -0
- remotivelabs/cli/tools/can/__init__.py +0 -0
- remotivelabs/cli/tools/can/can.py +78 -0
- remotivelabs/cli/tools/tools.py +9 -0
- remotivelabs/cli/topology/__init__.py +28 -0
- remotivelabs/cli/topology/all.py +322 -0
- remotivelabs/cli/topology/cli/__init__.py +3 -0
- remotivelabs/cli/topology/cli/run_in_docker.py +58 -0
- remotivelabs/cli/topology/cli/topology_cli.py +16 -0
- remotivelabs/cli/topology/cmd.py +130 -0
- remotivelabs/cli/topology/start_trial.py +134 -0
- remotivelabs/cli/typer/__init__.py +0 -0
- remotivelabs/cli/typer/typer_utils.py +27 -0
- remotivelabs/cli/utils/__init__.py +0 -0
- remotivelabs/cli/utils/console.py +99 -0
- remotivelabs/cli/utils/rest_helper.py +369 -0
- remotivelabs/cli/utils/time.py +11 -0
- remotivelabs/cli/utils/versions.py +120 -0
- remotivelabs_cli-0.5.0a1.dist-info/METADATA +51 -0
- remotivelabs_cli-0.5.0a1.dist-info/RECORD +84 -0
- remotivelabs_cli-0.5.0a1.dist-info/WHEEL +4 -0
- remotivelabs_cli-0.5.0a1.dist-info/entry_points.txt +3 -0
- remotivelabs_cli-0.5.0a1.dist-info/licenses/LICENSE +17 -0

remotivelabs/cli/broker/lib/client.py
@@ -0,0 +1,224 @@
from __future__ import annotations

import binascii
import json
import queue
from threading import Thread
from typing import Any, Callable, Iterable, Iterator, List, Optional, Union

import grpc

import remotivelabs.broker._generated.common_pb2 as common
import remotivelabs.broker._generated.network_api_pb2 as network_api
import remotivelabs.broker._generated.network_api_pb2_grpc as network_api_grpc
import remotivelabs.broker._generated.system_api_pb2_grpc as system_api_grpc
import remotivelabs.broker._generated.traffic_api_pb2_grpc as traffic_api_grpc
from remotivelabs.cli.broker.lib.helper import act_on_signal, create_channel
from remotivelabs.cli.broker.lib.signalcreator import SignalCreator


class SignalValue:
    """
    Wrapper around protobuf generated class to make it a bit simpler to use
    to make us learn how we want the next version of the API to look like.

    Use the signal.is_{type}() functions to validate type before you get value.
    Use signal.value() to get the actual value without any validation.
    Use signal.get_raw() to get the raw bytes
    Use signal.{type}_value() to get a validated typed value or error if something is wrong
    """

    def __init__(self, signal: network_api.Signal):
        self.signal: network_api.Signal = signal

    def __str__(self) -> str:
        return self.to_json()

    def to_json(self) -> str:
        return json.dumps(self.as_dict())

    def is_integer(self) -> bool:
        return self.signal.HasField("integer")

    def is_double(self) -> bool:
        return self.signal.HasField("double")

    def is_arbitration(self) -> bool:
        return self.signal.HasField("arbitration")

    def is_raw(self) -> bool:
        return self.signal.raw != b""

    def get_raw(self) -> Union[bytes, None]:
        if self.is_raw():
            return self.signal.raw
        return None

    def __get_value(self) -> Union[str, int, float, bool, None]:
        if self.signal.raw != b"":
            return "0x" + binascii.hexlify(self.signal.raw).decode("ascii")
        if self.signal.HasField("integer"):
            return self.signal.integer
        if self.signal.HasField("double"):
            return self.signal.double
        if self.signal.HasField("arbitration"):
            return self.signal.arbitration
        return None

    def timestamp_us(self) -> int:
        return self.signal.timestamp

    def name(self) -> str:
        return self.signal.id.name

    def namespace(self) -> str:
        return self.signal.id.namespace.name

    def value(self) -> Union[str, int, float, bool, None]:
        return self.__get_value()

    def __get_with_ensured_type(self, t: type) -> Any:
        v = self.__get_value()
        if isinstance(v, t):
            return v
        raise BrokerException(f"{v} was not expected type '{t}' but got '{type(v)}'")

    def float_value(self) -> float:
        return self.__get_with_ensured_type(float)  # type: ignore

    def int_value(self) -> int:
        return self.__get_with_ensured_type(int)  # type: ignore

    def bool_value(self) -> bool:
        return self.__get_with_ensured_type(bool)  # type: ignore

    def bytes_value(self) -> bytes:
        return self.__get_with_ensured_type(bytes)  # type: ignore

    def as_dict(self) -> dict[str, Any]:
        return {
            "timestamp_us": self.timestamp_us(),
            "name": self.name(),
            "namespace": self.namespace(),
            "value": self.value(),
        }


class SignalsInFrame(Iterable[SignalValue]):
    def __init__(self, signals: List[SignalValue]):
        self.signals = signals
        self.index = 0

    def __iter__(self) -> Iterator[SignalValue]:
        return self

    def __next__(self) -> SignalValue:
        try:
            result = self.signals[self.index]
        except IndexError:
            raise StopIteration
        self.index += 1
        return result


class SignalIdentifier:
    def __init__(self, name: str, namespace: str):
        self.name = name
        self.namespace = namespace

    @staticmethod
    def parse(signal_id: str) -> SignalIdentifier:
        s = signal_id.split(":")
        if len(s) != 2:
            raise BrokerException("signal names must have format namespace:signal_name")
        return SignalIdentifier(s[1], s[0])


class BrokerException(Exception):  # noqa: N818
    pass


class Client:
    def __init__(self, client_id: str = "broker_client"):
        self._signal_creator: SignalCreator
        self._traffic_stub: traffic_api_grpc.TrafficServiceStub
        self._system_stub: system_api_grpc.SystemServiceStub
        self._network_stub: network_api_grpc.NetworkServiceStub
        self._intercept_channel: grpc.Channel
        self.client_id = client_id
        self.url: Optional[str] = None
        self.api_key: Optional[str] = None
        self.on_connect: Union[Callable[[Client], None], None] = None
        self.on_signals: Union[Callable[[SignalsInFrame], None], None] = None

    def connect(self, url: str, api_key: Union[str, None] = None) -> None:
        self.url = url
        self.api_key = api_key
        if url.startswith("https"):
            if api_key is None:
                raise BrokerException("You must supply api-key or access-token to use a cloud broker")
            self._intercept_channel = create_channel(url, self.api_key, None)
        else:
            self._intercept_channel = create_channel(url, None, None)

        self._network_stub = network_api_grpc.NetworkServiceStub(self._intercept_channel)
        self._system_stub = system_api_grpc.SystemServiceStub(self._intercept_channel)
        self._traffic_stub = traffic_api_grpc.TrafficServiceStub(self._intercept_channel)
        self._signal_creator = SignalCreator(self._system_stub)
        if self.on_connect is not None:
            self.on_connect(self)

    def subscribe(
        self,
        signals_to_subscribe_to: List[SignalIdentifier],
        on_signals: Optional[Callable[[SignalsInFrame], None]] = None,
        changed_values_only: bool = True,
    ) -> Any:
        client_id = common.ClientId(id="subscribe-sample")
        if on_signals is None and self.on_signals is None:
            raise BrokerException(
                "You have not specified global client.on_signals nor client.subscribe(on_signals=callback), "
                "or you are invoking subscribe() before client.on_signals which is not allowed"
            )

        def to_protobuf_signal(s: SignalIdentifier) -> common.SignalId:
            return self._signal_creator.signal(s.name, s.namespace)

        _signals_to_subscribe_on = list(map(to_protobuf_signal, signals_to_subscribe_to))
        wait_for_subscription_queue: queue.Queue[tuple[str, Any]] = queue.Queue()
        Thread(
            target=act_on_signal,
            args=(
                client_id,
                self._network_stub,
                _signals_to_subscribe_on,
                changed_values_only,  # True: only report when signal changes
                lambda frame: self._on_signals(frame, on_signals),
                lambda sub: (wait_for_subscription_queue.put((self.client_id, sub))),
            ),
        ).start()
        # Wait for subscription
        client_id, subscription = wait_for_subscription_queue.get()  # type: ignore[assignment]
        return subscription

    def _on_signals(self, signals_in_frame: network_api.Signals, callback: Optional[Callable[[SignalsInFrame], None]]) -> None:
        """
        Updates "local" callback or global on_signals callback if local callback is None
        """
        if callback is not None:
            callback(SignalsInFrame(list(map(SignalValue, signals_in_frame))))  # type: ignore[call-overload]
        elif self.on_signals is not None:
            self.on_signals(SignalsInFrame(list(map(SignalValue, signals_in_frame))))  # type: ignore[call-overload]

    def list_signal_names(self) -> List[SignalIdentifier]:
        # Lists available signals
        configuration = self._system_stub.GetConfiguration(common.Empty())

        signal_names: List[SignalIdentifier] = []
        for network_info in configuration.networkInfo:
            res = self._system_stub.ListSignals(network_info.namespace)
            for finfo in res.frame:
                signal_names.append(SignalIdentifier(finfo.signalInfo.id.name, network_info.namespace.name))
                for sinfo in finfo.childInfo:
                    signal_names.append(SignalIdentifier(sinfo.id.name, network_info.namespace.name))
        return signal_names
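
The Client class above wires the generated gRPC stubs together and exposes a small callback-based API: connect() builds the channel, subscribe() starts act_on_signal on a background thread and blocks only until the subscription handshake arrives on the queue. A minimal usage sketch, assuming a broker is reachable on a placeholder local URL and that a namespace/signal pair such as VehicleBus:Speed exists in the broker configuration:

from remotivelabs.cli.broker.lib.client import Client, SignalIdentifier, SignalsInFrame


def on_frame(frame: SignalsInFrame) -> None:
    # Print each decoded signal as JSON (name, namespace, value, timestamp_us)
    for signal in frame:
        print(signal.to_json())


client = Client(client_id="example-client")
client.on_connect = lambda c: print(f"connected to {c.url}")

# Plain http URLs use an insecure channel; https requires api_key (see connect()).
# The address below is a placeholder for a locally running broker.
client.connect(url="http://127.0.0.1:50051")

# "namespace:signal_name" is the format expected by SignalIdentifier.parse().
subscription = client.subscribe(
    signals_to_subscribe_to=[SignalIdentifier.parse("VehicleBus:Speed")],
    on_signals=on_frame,
    changed_values_only=True,
)
# subscription is the streaming gRPC call; cancelling it ends the background stream.
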

remotivelabs/cli/broker/lib/helper.py
@@ -0,0 +1,277 @@
from __future__ import annotations

import hashlib
import itertools
import ntpath
import os
import posixpath
from glob import glob
from logging import getLogger
from typing import Any, BinaryIO, Callable, Generator, Optional, Sequence
from urllib.parse import urlparse

import grpc
from grpc_interceptor import ClientCallDetails, ClientInterceptor

import remotivelabs.broker._generated.common_pb2 as common
import remotivelabs.broker._generated.network_api_pb2 as network_api
import remotivelabs.broker._generated.network_api_pb2_grpc as network_api_grpc
import remotivelabs.broker._generated.system_api_pb2 as system_api
import remotivelabs.broker._generated.system_api_pb2_grpc as system_api_grpc
from remotivelabs.cli.utils.console import print_generic_error

log = getLogger(__name__)


class HeaderInterceptor(ClientInterceptor):
    def __init__(self, header_dict: dict[str, str]):
        self.header_dict = header_dict

    def intercept(
        self,
        method: Callable[[Any, ClientCallDetails], Any],
        request_or_iterator: Any,
        call_details: grpc.ClientCallDetails,
    ) -> Any:
        new_details = ClientCallDetails(
            call_details.method,
            call_details.timeout,
            self.header_dict.items(),  # type: ignore[arg-type]
            call_details.credentials,
            call_details.wait_for_ready,
            call_details.compression,
        )

        return method(request_or_iterator, new_details)


def create_channel(url: str, x_api_key: Optional[str] = None, authorization_token: Optional[str] = None) -> grpc.Channel:
    """
    Create communication channels for gRPC calls.
    """

    parsed_url = urlparse(url)
    if parsed_url.hostname is None:
        msg = f"invalid url {url}, missing hostname"
        raise ValueError(msg)

    if parsed_url.scheme == "https":
        creds = grpc.ssl_channel_credentials(root_certificates=None, private_key=None, certificate_chain=None)
        channel = grpc.secure_channel(parsed_url.hostname + ":" + str(parsed_url.port or "443"), creds)
    else:
        addr = parsed_url.hostname + ":" + str(parsed_url.port or "50051")
        channel = grpc.insecure_channel(addr)

    if x_api_key is None and authorization_token is None:
        return channel

    if x_api_key is not None:
        return grpc.intercept_channel(channel, HeaderInterceptor({"x-api-key": x_api_key}))

    # Adding both x-api-key (old) and authorization header for compatibility
    return grpc.intercept_channel(
        channel,
        HeaderInterceptor(
            {
                "x-api-key": authorization_token,  # type: ignore
                "authorization": f"Bearer {authorization_token}",
            }
        ),
    )


def publish_signals(
    client_id: common.ClientId,
    stub: network_api_grpc.NetworkServiceStub,
    signals_with_payload: Sequence[network_api.Signal],
    frequency: int = 0,
) -> None:
    """
    Publish array of values for signals
    """

    publisher_info = network_api.PublisherConfig(
        clientId=client_id,
        signals=network_api.Signals(signal=signals_with_payload),
        frequency=frequency,
    )

    try:
        stub.PublishSignals(publisher_info)
    except grpc._channel._Rendezvous as err:  # type:ignore[attr-defined]
        log.error(err)


def printer(signals: Sequence[common.SignalId]) -> None:
    """
    Debug printing of received array of signal with values.
    """

    for signal in signals:
        log.info(f"{signal} {signal.namespace.name}")


def get_sha256(path: str) -> str:
    """
    Calculate SHA256 for a file.
    """

    with open(path, "rb") as f:
        b = f.read()  # read entire file as bytes
        return hashlib.sha256(b).hexdigest()


def generate_data(file: BinaryIO, dest_path: str, chunk_size: int, sha256: str) -> Generator[system_api.FileUploadRequest, None, None]:
    for x in itertools.count(start=0):
        if x == 0:
            file_description = system_api.FileDescription(sha256=sha256, path=dest_path)
            yield system_api.FileUploadRequest(fileDescription=file_description)
        else:
            buf = file.read(chunk_size)
            if not buf:
                break
            yield system_api.FileUploadRequest(chunk=buf)


def upload_file(system_stub: system_api_grpc.SystemServiceStub, path: str, dest_path: str) -> None:
    """
    Upload single file to internal storage on broker.
    """

    sha256 = get_sha256(path)
    log.debug(f"SHA256 for file {path}: {sha256}")
    with open(path, "rb") as file:
        # make sure path is unix style (necessary for windows, and does no harm om
        # linux)
        upload_iterator = generate_data(file, dest_path.replace(ntpath.sep, posixpath.sep), 1000000, sha256)
        response = system_stub.UploadFile(upload_iterator, compression=grpc.Compression.Gzip)
        log.debug(f"Uploaded {path} with response {response}")


def download_file(system_stub: system_api_grpc.SystemServiceStub, path: str, dest_path: str) -> None:
    """
    Download file from Broker remote storage.
    """

    with open(dest_path, "wb") as file:
        for response in system_stub.BatchDownloadFiles(
            system_api.FileDescriptions(fileDescriptions=[system_api.FileDescription(path=path.replace(ntpath.sep, posixpath.sep))])
        ):
            assert not response.HasField("errorMessage"), f"Error uploading file, message is: {response.errorMessage}"
            file.write(response.chunk)


def upload_folder(system_stub: system_api_grpc.SystemServiceStub, folder: str) -> None:
    """
    Upload directory and its content to Broker remote storage.
    """

    files = [y for x in os.walk(folder) for y in glob(os.path.join(x[0], "*")) if not os.path.isdir(y)]
    assert len(files) != 0, "Specified upload folder is empty or does not exist"
    for file in files:
        upload_file(system_stub, file, file.replace(folder, ""))


def reload_configuration(
    system_stub: system_api_grpc.SystemServiceStub,
) -> None:
    """
    Trigger reload of configuration on Broker.
    """

    request = common.Empty()
    response = system_stub.ReloadConfiguration(request, timeout=60000)
    log.debug(f"Reload configuration with response {response}")


def check_license(
    system_stub: system_api_grpc.SystemServiceStub,
) -> None:
    """
    Check license to Broker. Throws exception if failure.
    """
    status = system_stub.GetLicenseInfo(common.Empty()).status
    assert status == system_api.LicenseStatus.VALID, f"Check your license, status is: {status}"


def act_on_signal(  # noqa: PLR0913
    client_id: common.ClientId,
    network_stub: network_api_grpc.NetworkServiceStub,
    sub_signals: Sequence[common.SignalId],
    on_change: bool,
    fun: Callable[[Sequence[network_api.Signal]], None],
    on_subscribed: Optional[Callable[..., None]] = None,
) -> None:
    """
    Bind callback to be triggered when receiving any of the specified signals.
    """

    log.debug("Subscription started")

    sub_info = network_api.SubscriberConfig(
        clientId=client_id,
        signals=network_api.SignalIds(signalId=sub_signals),
        onChange=on_change,
    )
    try:
        subscripton = network_stub.SubscribeToSignals(sub_info, timeout=None)
        if on_subscribed:
            on_subscribed(subscripton)
        log.debug("Waiting for signal...")
        for subs_counter in subscripton:
            fun(subs_counter.signal)

    except grpc.RpcError as e:
        # Only try to cancel if cancel was not already attempted
        if e.code() != grpc.StatusCode.CANCELLED:
            try:
                subscripton.cancel()
                print_generic_error("A gRPC error occurred:")
                print_generic_error(str(e))
            except grpc.RpcError:
                pass
    except grpc._channel._Rendezvous as err:  # type:ignore[attr-defined]
        log.error(err)
        # reload, alternatively non-existing signal
    log.debug("Subscription terminated")


def act_on_scripted_signal(  # noqa: PLR0913
    client_id: common.ClientId,
    network_stub: network_api_grpc.NetworkServiceStub,
    script: bytes,
    on_change: bool,
    fun: Callable[[Sequence[network_api.Signal]], None],
    on_subscribed: Optional[Callable[..., None]] = None,
) -> None:
    """
    Bind callback to be triggered when receiving any of the specified signals.
    """

    log.debug("Subscription with mapping code started...")

    sub_info = network_api.SubscriberWithScriptConfig(
        clientId=client_id,
        script=script,
        onChange=on_change,
    )
    try:
        subscription = network_stub.SubscribeToSignalWithScript(sub_info, timeout=None)
        if on_subscribed:
            on_subscribed(subscription)
        log.debug("Waiting for signal...")
        for subs_counter in subscription:
            fun(subs_counter.signal)

    except grpc.RpcError as e:
        try:
            subscription.cancel()
            print_generic_error("A gRPC error occurred:")
            print_generic_error(str(e))
        except grpc.RpcError:
            pass

    except grpc._channel._Rendezvous as err:  # type:ignore[attr-defined]
        log.error(err)
        # reload, alternatively non-existing signal
    log.debug("Subscription terminated")
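
create_channel is the single entry point for both secure (https, defaulting to port 443) and insecure (defaulting to port 50051) connections, with API keys or bearer tokens injected through HeaderInterceptor. A short sketch of how these helpers compose; the broker URL, API key, and file paths below are placeholders, not values from this package:

import remotivelabs.broker._generated.system_api_pb2_grpc as system_api_grpc

from remotivelabs.cli.broker.lib.helper import check_license, create_channel, reload_configuration, upload_file

# https triggers SSL credentials plus the x-api-key header; plain http would
# fall back to an insecure channel on port 50051.
channel = create_channel("https://broker.example.com", x_api_key="my-api-key")
system_stub = system_api_grpc.SystemServiceStub(channel)

check_license(system_stub)  # raises AssertionError unless LicenseStatus.VALID

# Chunked, gzip-compressed upload of a local file to the broker's internal storage,
# followed by a configuration reload so the broker picks it up.
upload_file(system_stub, "configuration/interfaces.json", "/interfaces.json")
reload_configuration(system_stub)
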

remotivelabs/cli/broker/lib/signalcreator.py
@@ -0,0 +1,196 @@
from __future__ import annotations

import logging
from typing import Any, Dict, List, Optional, Sequence, TypeVar

import remotivelabs.broker._generated.common_pb2 as common
import remotivelabs.broker._generated.network_api_pb2 as network_api
import remotivelabs.broker._generated.system_api_pb2_grpc as system_api_grpc

T = TypeVar("T")

_logger = logging.getLogger("remotivelabs.SignalCreator")
_MSG_DUPLICATE = "Warning duplicated (namespace.signal): {}, to avoid" + 'ambiguity set "short_names": false in your interfaces.json on {}'


class MetaGetter:
    def __init__(self, proto_message: Any):
        self.meta = proto_message

    def _getDefault(self, field: T, default: Optional[T]) -> T:  # noqa: N802
        if field is not None:
            return field

        if default:
            return default

        raise Exception("Failed to retrieve meta data field")

    def getDescription(self, default: Optional[str] = None) -> str:  # noqa: N802
        """Get protobuffer MetaData field description"""
        return self._getDefault(self.meta.description, default)  # type: ignore

    def getUnit(self, default: Optional[str] = None) -> str:  # noqa: N802
        """Get protobuffer MetaData field unit"""
        return self._getDefault(self.meta.unit, default)  # type: ignore

    def getMax(self, default: Optional[float] = None) -> float:  # noqa: N802
        """Get protobuffer MetaData field max"""
        return self._getDefault(self.meta.max, default)  # type: ignore

    def getMin(self, default: Optional[float] = None) -> float:  # noqa: N802
        """Get protobuffer MetaData field min"""
        return self._getDefault(self.meta.min, default)  # type: ignore

    def getSize(self, default: Optional[int] = None) -> int:  # noqa: N802
        """Get protobuffer MetaData field size"""
        return self._getDefault(self.meta.size, default)  # type: ignore

    def getIsRaw(self, default: Optional[bool] = None) -> bool:  # noqa: N802
        """Get protobuffer MetaData field isRaw"""
        return self._getDefault(self.meta.isRaw, default)  # type: ignore

    def getFactor(self, default: Optional[float] = None) -> float:  # noqa: N802
        """Get protobuffer MetaData field factor"""
        return self._getDefault(self.meta.factor, default)  # type: ignore

    def getOffset(self, default: Optional[float] = None) -> float:  # noqa: N802
        """Get protobuffer MetaData field offset"""
        return self._getDefault(self.meta.offset, default)  # type: ignore

    def getSenders(self, default: Optional[Sequence[str]] = None) -> Sequence[str]:  # noqa: N802
        """Get protobuffer MetaData field sender"""
        return self._getDefault(self.meta.sender, default)

    def getReceivers(self, default: Optional[Sequence[str]] = None) -> Sequence[str]:  # noqa: N802
        """Get protobuffer MetaData field receiver"""
        return self._getDefault(self.meta.receiver, default)

    def getCycleTime(self, default: Optional[float] = None) -> float:  # noqa: N802
        """Get protobuffer MetaData field cycleTime"""
        return self._getDefault(self.meta.cycleTime, default)  # type: ignore

    def getStartValue(self, default: Optional[float] = None) -> float:  # noqa: N802
        """Get protobuffer MetaData field startValue"""
        return self._getDefault(self.meta.startValue, default)  # type: ignore


class SignalCreator:
    """
    Class for prepearing and writing signals via gRPC.
    """

    def __init__(self, system_stub: system_api_grpc.SystemServiceStub, namespaces: List[str] | None = None):
        self._sinfos: Dict[Any, Any] = {}
        self._virtual: List[Any] = []
        self._networks: Dict[Any, Any] = {}
        nss: List[common.NameSpace] = []
        if namespaces is None:
            conf = system_stub.GetConfiguration(common.Empty())
            for ninfo in conf.networkInfo:
                nss.append(ninfo.namespace)
                if ninfo.type == "virtual":
                    self._virtual.append(ninfo.namespace.name)
        else:
            nss = list(map(lambda namespace: common.NameSpace(name=namespace), namespaces))

        for namespace in nss:
            res = system_stub.ListSignals(namespace)
            self._addframes(namespace, res)
            for finfo in res.frame:
                self._add(finfo.signalInfo)
                for sinfo in finfo.childInfo:
                    self._add(sinfo)

    def _addframes(self, namespace: common.NameSpace, res: common.Frames) -> None:
        self._networks[namespace.name] = res

    def _add(self, sinfo: common.SignalInfo) -> None:
        k = (sinfo.id.namespace.name, sinfo.id.name)
        if k in self._sinfos:
            msg = _MSG_DUPLICATE.format(k, sinfo.id.namespace)
            _logger.warning(msg)
        self._sinfos[k] = MetaGetter(sinfo.metaData)

    def get_meta(self, name: str, namespace_name: str) -> MetaGetter:
        """
        Get meta fields for signal or frame
        """

        k = (namespace_name, name)
        if (k not in self._sinfos) and (namespace_name not in self._virtual):
            raise Exception(f"signal not declared (namespace, signal): {k}")
        return self._sinfos[k]  # type: ignore

    def signal(self, name: str, namespace_name: str) -> common.SignalId:
        """
        Create object for signal.
        """

        self.get_meta(name, namespace_name)  # Checks if the signal is present
        return common.SignalId(name=name, namespace=common.NameSpace(name=namespace_name))

    def frames(self, namespace_name: str) -> Sequence[common.SignalId]:
        """
        Get all frames in given namespace
        """

        all_frames = []
        for finfo in self._networks[namespace_name].frame:
            all_frames.append(self.signal(finfo.signalInfo.id.name, namespace_name))
        return all_frames

    def frame_by_signal(self, name: str, namespace_name: str) -> common.SignalId:
        """
        Get frame for the given signal.
        """

        for finfo in self._networks[namespace_name].frame:
            for sinfo in finfo.childInfo:
                if sinfo.id.name == name:
                    return self.signal(finfo.signalInfo.id.name, namespace_name)
        raise Exception(f"signal not declared (namespace, signal): {namespace_name} {name}")

    def signals_in_frame(self, name: str, namespace_name: str) -> Sequence[common.SignalId]:
        """
        Get all signals residing in the frame.
        """

        all_signals = []
        frame = None
        for finfo in self._networks[namespace_name].frame:
            if finfo.signalInfo.id.name == name:
                frame = finfo
                for sinfo in finfo.childInfo:
                    all_signals.append(self.signal(sinfo.id.name, namespace_name))
        assert frame is not None, f"frame {name} does not exist in namespace {namespace_name}"
        return all_signals

    def signal_with_payload(
        self, name: str, namespace_name: str, value_pair: tuple[str, Any], allow_malformed: bool = False
    ) -> network_api.Signal:
        """
        Create value with signal for writing.
        """

        signal = self.signal(name, namespace_name)
        meta = self.get_meta(name, namespace_name)

        key, value = value_pair
        types = ["integer", "double", "raw", "arbitration"]
        if key not in types:
            raise Exception(f"type must be one of: {types}")
        if key == "raw" and allow_malformed is False:
            expected = meta.getSize()
            assert len(value) * 8 == expected, f"payload size missmatch, expected {expected / 8} bytes"
        elif key != "raw":
            # Check bounds if any
            check_min = meta.getMin()
            if (check_min is not None) and (value < check_min):
                _logger.warning(f'Value below minimum value of {check_min} for signal "{name}"')
            check_max = meta.getMax()
            if (check_max is not None) and (value > check_max):
                _logger.warning(f'Value above maximum value of {check_max} for signal "{name}"')

        params = {"id": signal, key: value}
        return network_api.Signal(**params)
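
SignalCreator caches metadata per (namespace, signal) pair so values can be checked against size and min/max bounds before publishing. A sketch of how it combines with publish_signals from helper.py; the broker address, namespace, and signal name are again hypothetical:

import remotivelabs.broker._generated.common_pb2 as common
import remotivelabs.broker._generated.network_api_pb2_grpc as network_api_grpc
import remotivelabs.broker._generated.system_api_pb2_grpc as system_api_grpc

from remotivelabs.cli.broker.lib.helper import create_channel, publish_signals
from remotivelabs.cli.broker.lib.signalcreator import SignalCreator

channel = create_channel("http://127.0.0.1:50051")  # placeholder broker address
system_stub = system_api_grpc.SystemServiceStub(channel)
network_stub = network_api_grpc.NetworkServiceStub(channel)

creator = SignalCreator(system_stub)  # loads metadata for all configured namespaces
client_id = common.ClientId(id="example-publisher")

# value_pair is (type, value) where type is one of integer/double/raw/arbitration;
# out-of-range numeric values only log a warning, raw payloads must match getSize() bits.
speed = creator.signal_with_payload("Speed", "VehicleBus", ("double", 42.0))
publish_signals(client_id, network_stub, [speed])
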