auto-trainer-api 0.9.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of auto-trainer-api might be problematic. Click here for more details.

@@ -0,0 +1,355 @@
1
+ .idea
2
+
3
+ # Build results
4
+ [Dd]ebug/
5
+ [Dd]ebugPublic/
6
+ [Rr]elease/
7
+ [Rr]eleases/
8
+ x64/
9
+ x86/
10
+ [Ww][Ii][Nn]32/
11
+ [Aa][Rr][Mm]/
12
+ [Aa][Rr][Mm]64/
13
+ bld/
14
+ [Bb]in/
15
+ [Oo]bj/
16
+ [Ll]og/
17
+ [Ll]ogs/
18
+
19
+ # .NET Core
20
+ project.lock.json
21
+ project.fragment.lock.json
22
+ artifacts/
23
+
24
+ # ASP.NET Scaffolding
25
+ ScaffoldingReadMe.txt
26
+
27
+ # NuGet Packages
28
+ *.nupkg
29
+ # NuGet Symbol Packages
30
+ *.snupkg
31
+
32
+ # Others
33
+ ~$*
34
+ *~
35
+ CodeCoverage/
36
+
37
+ # MSBuild Binary and Structured Log
38
+ *.binlog
39
+
40
+ # MSTest test Results
41
+ [Tt]est[Rr]esult*/
42
+ [Bb]uild[Ll]og.*
43
+
44
+ # NUnit
45
+ *.VisualState.xml
46
+ TestResult.xml
47
+ nunit-*.xml
48
+
49
+ # Byte-compiled / optimized / DLL files
50
+ __pycache__/
51
+ *.py[cod]
52
+ *$py.class
53
+
54
+ # C extensions
55
+ *.so
56
+
57
+ # Distribution / packaging
58
+ .Python
59
+ build/
60
+ develop-eggs/
61
+ dist/
62
+ downloads/
63
+ eggs/
64
+ .eggs/
65
+ lib/
66
+ lib64/
67
+ parts/
68
+ sdist/
69
+ var/
70
+ wheels/
71
+ share/python-wheels/
72
+ *.egg-info/
73
+ .installed.cfg
74
+ *.egg
75
+ MANIFEST
76
+
77
+ # PyInstaller
78
+ # Usually these files are written by a python script from a template
79
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
80
+ *.manifest
81
+ *.spec
82
+
83
+ site/
84
+
85
+ # Installer logs
86
+ pip-log.txt
87
+ pip-delete-this-directory.txt
88
+
89
+ ## Ignore Visual Studio temporary files, build results, and
90
+ ## files generated by popular Visual Studio add-ons.
91
+
92
+ # User-specific files
93
+ *.suo
94
+ *.user
95
+ *.userosscache
96
+ *.sln.docstates
97
+
98
+ # User-specific files (MonoDevelop/Xamarin Studio)
99
+ *.userprefs
100
+
101
+ # Build results
102
+ [Dd]ebug/
103
+ [Dd]ebugPublic/
104
+ [Rr]elease/
105
+ [Rr]eleases/
106
+ x64/
107
+ x86/
108
+ bld/
109
+ [Bb]in/
110
+ [Oo]bj/
111
+ [Ll]og/
112
+
113
+ # Visual Studio 2015 cache/options directory
114
+ .vs/
115
+ # Uncomment if you have tasks that create the project's static files in wwwroot
116
+ #wwwroot/
117
+
118
+ # MSTest test Results
119
+ [Tt]est[Rr]esult*/
120
+ [Bb]uild[Ll]og.*
121
+
122
+ # NUNIT
123
+ *.VisualState.xml
124
+ TestResult.xml
125
+
126
+ # Build Results of an ATL Project
127
+ [Dd]ebugPS/
128
+ [Rr]eleasePS/
129
+ dlldata.c
130
+
131
+ # DNX
132
+ project.lock.json
133
+ artifacts/
134
+
135
+ *_i.c
136
+ *_p.c
137
+ *_i.h
138
+ *.ilk
139
+ *.meta
140
+ *.obj
141
+ *.pch
142
+ *.pdb
143
+ *.pgc
144
+ *.pgd
145
+ *.rsp
146
+ *.sbr
147
+ *.tlb
148
+ *.tli
149
+ *.tlh
150
+ *.tmp
151
+ *.tmp_proj
152
+ *.log
153
+ *.vspscc
154
+ *.vssscc
155
+ .builds
156
+ *.pidb
157
+ *.svclog
158
+ *.scc
159
+
160
+ # Chutzpah Test files
161
+ _Chutzpah*
162
+
163
+ # Visual C++ cache files
164
+ ipch/
165
+ *.aps
166
+ *.ncb
167
+ *.opendb
168
+ *.opensdf
169
+ *.sdf
170
+ *.cachefile
171
+ *.VC.db
172
+ *.VC.VC.opendb
173
+
174
+ # Visual Studio profiler
175
+ *.psess
176
+ *.vsp
177
+ *.vspx
178
+ *.sap
179
+
180
+ # TFS 2012 Local Workspace
181
+ $tf/
182
+
183
+ # Guidance Automation Toolkit
184
+ *.gpState
185
+
186
+ # ReSharper is a .NET coding add-in
187
+ _ReSharper*/
188
+ *.[Rr]e[Ss]harper
189
+ *.DotSettings.user
190
+
191
+ # JustCode is a .NET coding add-in
192
+ .JustCode
193
+
194
+ # TeamCity is a build add-in
195
+ _TeamCity*
196
+
197
+ # DotCover is a Code Coverage Tool
198
+ *.dotCover
199
+
200
+ # NCrunch
201
+ _NCrunch_*
202
+ .*crunch*.local.xml
203
+ nCrunchTemp_*
204
+
205
+ # MightyMoose
206
+ *.mm.*
207
+ AutoTest.Net/
208
+
209
+ # Web workbench (sass)
210
+ .sass-cache/
211
+
212
+ # Installshield output folder
213
+ [Ee]xpress/
214
+
215
+ MigrationBackup
216
+
217
+ # DocProject is a documentation generator add-in
218
+ DocProject/buildhelp/
219
+ DocProject/Help/*.HxT
220
+ DocProject/Help/*.HxC
221
+ DocProject/Help/*.hhc
222
+ DocProject/Help/*.hhk
223
+ DocProject/Help/*.hhp
224
+ DocProject/Help/Html2
225
+ DocProject/Help/html
226
+
227
+ # Click-Once directory
228
+ publish/
229
+
230
+ # Publish Web Output
231
+ *.[Pp]ublish.xml
232
+ *.azurePubxml
233
+ # TODO: Comment the next line if you want to checkin your web deploy settings
234
+ # but database connection strings (with potential passwords) will be unencrypted
235
+ # *.pubxml
236
+ *.publishproj
237
+
238
+ # Microsoft Azure Web App publish settings. Comment the next line if you want to
239
+ # checkin your Azure Web App publish settings, but sensitive information contained
240
+ # in these scripts will be unencrypted
241
+ PublishScripts/
242
+
243
+ # NuGet Packages
244
+ *.nupkg
245
+ # The packages folder can be ignored because of Package Restore
246
+ **/packages/*
247
+ # except build/, which is used as an MSBuild target.
248
+ !**/packages/build/
249
+ # Uncomment if necessary however generally it will be regenerated when needed
250
+ #!**/packages/repositories.config
251
+ # NuGet v3's project.json files produces more ignoreable files
252
+ *.nuget.props
253
+ *.nuget.targets
254
+
255
+ # Microsoft Azure Build Output
256
+ csx/
257
+ *.build.csdef
258
+
259
+ # Microsoft Azure Emulator
260
+ ecf/
261
+ rcf/
262
+
263
+ # Windows Store app package directories and files
264
+ AppPackages/
265
+ BundleArtifacts/
266
+ Package.StoreAssociation.xml
267
+ _pkginfo.txt
268
+
269
+ # Visual Studio cache files
270
+ # files ending in .cache can be ignored
271
+ *.[Cc]ache
272
+ # but keep track of directories ending in .cache
273
+ !*.[Cc]ache/
274
+
275
+ # Others
276
+ ClientBin/
277
+ ~$*
278
+ *~
279
+ *.dbmdl
280
+ *.dbproj.schemaview
281
+ *.pfx
282
+ *.publishsettings
283
+ node_modules/
284
+ orleans.codegen.cs
285
+
286
+ # Since there are multiple workflows, uncomment next line to ignore bower_components
287
+ # (https://github.com/github/gitignore/pull/1529#issuecomment-104372622)
288
+ #bower_components/
289
+
290
+ # RIA/Silverlight projects
291
+ Generated_Code/
292
+
293
+ # Backup & report files from converting an old project file
294
+ # to a newer Visual Studio version. Backup files are not needed,
295
+ # because we have git ;-)
296
+ _UpgradeReport_Files/
297
+ UpgradeLog*.XML
298
+ UpgradeLog*.htm
299
+
300
+ # SQL Server files
301
+ *.mdf
302
+ *.ldf
303
+
304
+ # Business Intelligence projects
305
+ *.rdl.data
306
+ *.bim.layout
307
+ *.bim_*.settings
308
+
309
+ # Microsoft Fakes
310
+ FakesAssemblies/
311
+
312
+ # GhostDoc plugin setting file
313
+ *.GhostDoc.xml
314
+
315
+ # Node.js Tools for Visual Studio
316
+ .ntvs_analysis.dat
317
+
318
+ # Visual Studio 6 build log
319
+ *.plg
320
+
321
+ # Visual Studio 6 workspace options file
322
+ *.opt
323
+
324
+ # Visual Studio LightSwitch build output
325
+ **/*.HTMLClient/GeneratedArtifacts
326
+ **/*.DesktopClient/GeneratedArtifacts
327
+ **/*.DesktopClient/ModelManifest.xml
328
+ **/*.Server/GeneratedArtifacts
329
+ **/*.Server/ModelManifest.xml
330
+ _Pvt_Extensions
331
+
332
+ # Paket dependency manager
333
+ .paket/paket.exe
334
+ paket-files/
335
+
336
+ # FAKE - F# Make
337
+ .fake/
338
+
339
+ # JetBrains Rider
340
+ .idea/
341
+ *.sln.iml
342
+
343
+ OriginTheme/
344
+ obj.*/
345
+
346
+ # Part of text fixture datasets that may occur but are not necessary.
347
+
348
+ System/Migrations/
349
+ System/Autobackup/
350
+ .DS_Store
351
+
352
+ Deployment/*/AppComponents.wxs
353
+ Deployment/*/AnalysisComponents.wxs
354
+
355
+ local.settings.json
@@ -0,0 +1,27 @@
1
+ Metadata-Version: 2.4
2
+ Name: auto-trainer-api
3
+ Version: 0.9.0
4
+ Summary: API for interfacing with the core acquisition process via platform and language agnostic message queues.
5
+ License: AGPL-3.0-only
6
+ Classifier: Operating System :: OS Independent
7
+ Classifier: Programming Language :: Python :: 3
8
+ Requires-Python: >=3.8
9
+ Requires-Dist: pyhumps==3.8.0
10
+ Requires-Dist: pyzmq==26.4
11
+ Provides-Extra: telemetry
12
+ Requires-Dist: opentelemetry-api; extra == 'telemetry'
13
+ Requires-Dist: opentelemetry-sdk; extra == 'telemetry'
14
+ Provides-Extra: test
15
+ Requires-Dist: pytest==8.2.0; extra == 'test'
16
+ Description-Content-Type: text/markdown
17
+
18
+ # Autotrainer API: Python Integration
19
+
20
+ The python `auto-trainer-api` module is intended to provide an efficient means to emit information that
21
+ is needed for local or remote management and to receive commands from those sources.
22
+
23
+ The exposed API is intended to be agnostic to the underlying transport layer. The current implementation uses
24
+ ZeroMQ for reasons described in the top-level README.
25
+
26
+ # Installation
27
+ The package is published to the PyPi package index and can installed normally.
@@ -0,0 +1,10 @@
1
+ # Autotrainer API: Python Integration
2
+
3
+ The python `auto-trainer-api` module is intended to provide an efficient means to emit information that
4
+ is needed for local or remote management and to receive commands from those sources.
5
+
6
+ The exposed API is intended to be agnostic to the underlying transport layer. The current implementation uses
7
+ ZeroMQ for reasons described in the top-level README.
8
+
9
+ # Installation
10
+ The package is published to the PyPI package index and can be installed normally.
@@ -0,0 +1,38 @@
1
+ [build-system]
2
+ requires = ["hatchling"]
3
+ build-backend = "hatchling.build"
4
+
5
+ [tool.hatch.build.targets.wheel]
6
+ packages = ["src/autotrainer"]
7
+ namespaces = true
8
+
9
+ [tool.hatch.build.targets.sdist]
10
+ exclude = [
11
+ "/mkdocs.yml",
12
+ "/docs",
13
+ ]
14
+
15
+ [project]
16
+ name = "auto-trainer-api"
17
+ version = "0.9.0"
18
+ description = "API for interfacing with the core acquisition process via platform and language agnostic message queues."
19
+ license = { text = "AGPL-3.0-only" }
20
+ readme = "README.md"
21
+ requires-python = ">=3.8"
22
+ classifiers = [
23
+ "Programming Language :: Python :: 3",
24
+ "Operating System :: OS Independent"
25
+ ]
26
+ dependencies = [
27
+ "pyhumps==3.8.0",
28
+ "pyzmq==26.4"
29
+ ]
30
+
31
+ [project.optional-dependencies]
32
+ telemetry = [
33
+ "opentelemetry-api",
34
+ "opentelemetry-sdk"
35
+ ]
36
+ test = [
37
+ "pytest==8.2.0"
38
+ ]
@@ -0,0 +1,6 @@
1
# Development entry point — presumably __main__.py, so the package can be run as a module. TODO confirm filename.
from autotrainer.api import run_server

if __name__ == '__main__':
    # Starts a simple application in lieu of a real auto-trainer application to publish heartbeat messages and receive
    # command requests for development and testing.
    run_server()
@@ -0,0 +1,27 @@
1
+ """
2
+ API Service functionality for Autotrainer.
3
+ """
4
+
5
+ from typing import Optional
6
+
7
+ from .api_options import ApiOptions, RpcOptions, TelemetryOptions, create_default_api_options
8
+ from .rpc_service import ApiTopic, RpcService, ApiCommandRequest, ApiCommandRequestResponse, ApiCommandReqeustResult
9
+ from .telemtry import configure_telemetry
10
+
11
+ from .util import patch_uuid_encoder
12
+
13
+ from .tools import run_server
14
+
15
+
16
+ def create_api_service(options: ApiOptions) -> Optional[RpcService]:
17
+ from .zeromq import ZeroMQApiService
18
+
19
+ # Several autotrainer messages may contain a UUID, which is not handled by the default JSON encoder.
20
+ # patch_uuid_encoder()
21
+
22
+ configure_telemetry(options.telemetry)
23
+
24
+ if options.rpc.enable:
25
+ return ZeroMQApiService(options.rpc)
26
+ else:
27
+ return None
@@ -0,0 +1,32 @@
1
+ from dataclasses import dataclass
2
+ from typing import Optional
3
+
4
+
5
@dataclass(frozen=True)
class TelemetryOptions:
    """Options controlling OpenTelemetry configuration for the API service."""
    # Master switch; when False, configure_telemetry() is a no-op.
    enable: bool = False
    # OTLP collector endpoint; when None, the OTEL_EXPORTER_OTLP_ENDPOINT environment variable is consulted instead.
    endpoint: Optional[str] = None
    # NOTE(review): api_key is currently not referenced by configure_telemetry() — confirm intended use.
    api_key: str = ""
10
+
11
+
12
@dataclass(frozen=True)
class RpcOptions:
    """Options for the RPC (publish queue + command request) service."""
    # When False, create_api_service() returns None instead of building a service.
    enable: bool = True
    # Identifier echoed in heartbeat messages to name this service instance.
    identifier: str = "autotrainer-device"
    # Seconds between heartbeat publications.
    heartbeat_interval: int = 5
    # Port the publisher (PUB) socket binds to for subscribers.
    subscriber_port: int = 5556
    # Port the command (REP) socket binds to for requests.
    command_port: int = 5557
19
+
20
+
21
@dataclass(frozen=True)
class ApiOptions:
    """Top-level options bundle for the Autotrainer API service."""
    # RPC settings; None means no RPC configuration is available.
    rpc: Optional[RpcOptions] = None
    # Telemetry settings; None leaves telemetry unconfigured.
    telemetry: Optional[TelemetryOptions] = None
25
+
26
+
27
def create_default_api_options() -> ApiOptions:
    """
    Create default API options for the Autotrainer API service.

    Both the RPC and telemetry sections are populated with their dataclass defaults.
    """
    rpc_defaults = RpcOptions()
    telemetry_defaults = TelemetryOptions()
    return ApiOptions(rpc=rpc_defaults, telemetry=telemetry_defaults)
@@ -0,0 +1,403 @@
1
+ import json
2
+ import logging
3
+ import time
4
+ from dataclasses import dataclass, asdict
5
+ from enum import IntEnum
6
+ from queue import Queue, Empty
7
+ from threading import Timer, Thread
8
+ from typing import Optional, Protocol, Any
9
+ from typing_extensions import Self
10
+
11
+ import humps
12
+
13
+ from .api_options import RpcOptions
14
+
15
+ logger = logging.getLogger(__name__)
16
+
17
+
18
class ApiTopic(IntEnum):
    """
    A topic is required for all published messages. This allows subscribers to filter messages through the message
    queue functionality rather than seeing all messages and filtering themselves.

    Any 4-byte integer value is valid.
    """
    # NOTE(review): the trailing commas make each assigned value a 1-tuple; Enum passes the
    # tuple to int(), so the members still get the intended integer values — but the commas
    # could be removed to avoid the fragile idiom.
    ANY = 0,
    HEARTBEAT = 1001,
    """System heartbeat message indicating service availability."""
    EVENT = 2001,
    """
    Data published under the 'Event' umbrella, which is typically major system/application events.
    """
    COMMAND_RESULT = 3001,
    """ Responses to asynchronous command handling. """
34
+
35
+
36
class ApiCommand(IntEnum):
    """
    A command value is required for all command requests.

    Values not declared here are treated as user-defined commands by
    ApiCommandRequest.parse_object.
    """
    # Trailing commas removed: `NONE = 0,` assigns a 1-tuple that Enum only
    # incidentally coerces back to an int.
    NONE = 0
    USER_DEFINED = 99999

    @classmethod
    def is_member(cls, value) -> bool:
        """Return True if *value* is one of the declared command values."""
        # Use the public Enum constructor instead of the private _value2member_map_.
        try:
            cls(value)
        except ValueError:
            return False
        return True
46
+
47
+
48
@dataclass(frozen=True)
class ApiCommandRequest:
    """
    A command request contains the command and any associated data.
    """
    command: ApiCommand
    custom_command: int = -1
    nonce: int = -1
    data: Optional[dict] = None

    @classmethod
    def parse_bytes(cls, message: bytes) -> Optional[Self]:
        """Decode a UTF-8, camelCase JSON payload and parse it into a request."""
        decoded = humps.decamelize(message.decode("utf8"))
        return ApiCommandRequest.parse_object(json.loads(decoded))

    @classmethod
    def parse_object(cls, obj: Any) -> Optional[Self]:
        """Build a request from a decoded JSON object, or None when no 'command' key is present."""
        if "command" not in obj:
            return None

        raw_command = obj["command"]
        # Commands outside the declared enum are routed to USER_DEFINED rather than rejected.
        command = ApiCommand(raw_command) if ApiCommand.is_member(raw_command) else ApiCommand.USER_DEFINED

        return ApiCommandRequest(
            command=command,
            custom_command=obj.get("custom_command", -1),
            nonce=obj.get("nonce", 0),
            data=obj.get("data"),
        )
88
+
89
+
90
class ApiCommandReqeustResult(IntEnum):
    """
    Result of a command request.

    NOTE(review): the class name misspells 'Request'; it is re-exported as part of the
    package's public API, so renaming it would be a breaking change.
    """
    UNRECOGNIZED = 0,
    SUCCESS = 1,
    PENDING = 2,
    PENDING_WITH_NOTIFICATION = 3,
    FAILED = 4,
    EXCEPTION = 5,
    UNAVAILABLE = 9999
101
+
102
+
103
class ApiCommandRequestErrorKind(IntEnum):
    """Categorizes the source of a command-request failure."""
    # No error occurred.
    NONE = 0,
    # Failure inside the API service itself (e.g. response serialization).
    SYSTEM_ERROR = 0x01,
    # Failure reported by the command handler.
    COMMAND_ERROR = 0x02


# Error code used with SYSTEM_ERROR when a response could not be serialized.
ApiCommandRequestSystemErrorSerialization: int = 0
110
+
111
+
112
@dataclass(frozen=True)
class ApiCommandRequestResponse:
    """
    A command response contains the command and any associated data.

    If the command request will have a result asynchronously in the future, one pattern would be to return some form
    of context in the data field that the client can use as a reference in a future published message.
    """
    nonce: int
    """
    This will be set to 0 if a command request could not be deserialized. Caller should set the nonce to any value > 0
    if they want to be able to identify commands that were not simply unrecognized, but unparseable.
    """
    command: ApiCommand
    result: ApiCommandReqeustResult = ApiCommandReqeustResult.UNRECOGNIZED
    data: Optional[dict] = None
    error_kind: ApiCommandRequestErrorKind = ApiCommandRequestErrorKind.NONE
    error_code: int = 0
    error_message: Optional[str] = None

    def as_bytes(self, allow_fallback: bool = True) -> bytes:
        """
        Serialize this response to camelCase JSON bytes.

        This is guaranteed to return a valid response, even if modified due to any errors in serialization. It must
        not throw.

        :param allow_fallback: true to allow serialization without the 'data' element if serialization initially fails.
        :return: serialized message as bytes
        """
        try:
            return humps.camelize(json.dumps(self.__dict__)).encode("utf8")
        except Exception as ex:
            logger.error(ex)

        if allow_fallback:
            # Assume it is an issue w/the contents of the user-definable dictionary contents.
            # Work on a copy: writing through self.__dict__ directly would silently mutate
            # this (frozen) instance, bypassing the dataclass immutability.
            contents = dict(self.__dict__)
            contents["data"] = None
            # If the next attempt works, this will have been the situation.
            contents["error_kind"] = ApiCommandRequestErrorKind.SYSTEM_ERROR
            contents["error_code"] = ApiCommandRequestSystemErrorSerialization
            contents["error_message"] = "An error occurred serializing the 'data' element of the response."
            try:
                return humps.camelize(json.dumps(contents)).encode("utf8")
            except Exception as ex:
                logger.error(ex)

        # Last resort: a minimal payload built only from primitive fields, which is always serializable.
        serialization_error = {"nonce": self.nonce, "command": self.command, "result": self.result,
                               "error_kind": ApiCommandRequestErrorKind.SYSTEM_ERROR,
                               "error_code": ApiCommandRequestSystemErrorSerialization,
                               "error_message": "An error occurred serializing the response."}

        return humps.camelize(json.dumps(serialization_error)).encode("utf8")

    @staticmethod
    def for_exception(command: ApiCommand, nonce: int, ex: Exception) -> "ApiCommandRequestResponse":
        """Build an EXCEPTION-result response carrying the exception text."""
        return ApiCommandRequestResponse(command=command, nonce=nonce, result=ApiCommandReqeustResult.EXCEPTION,
                                         error_message=str(ex))
169
+
170
+
171
class CommandRequestDelegate(Protocol):
    """
    Callable that handles an incoming command request and returns the immediate response.

    This callback is expected to be fast. It is intended to initiate a command, not necessarily complete it. Any
    non-trivial action is expected to accept the command request and return, perform the action on a non-calling thread
    or process, and use the message publishing API to report changes, results, etc.
    """

    def __call__(self, request: ApiCommandRequest) -> ApiCommandRequestResponse: ...
179
+
180
+
181
class ApiMessageQueueService(Protocol):
    """
    Minimum requirements to fulfill the API service message queue interface. Implementation details are left to the
    implementation.

    Implementations are required to be able to publish messages to one or more subscribers.
    """

    def send(self, topic: ApiTopic, data: bytes) -> bool:
        """Publish raw bytes under *topic*; True if the message was accepted for sending."""
        ...

    def send_string(self, topic: ApiTopic, message: str) -> bool:
        """Publish a text message under *topic*; True if the message was accepted for sending."""
        ...

    def send_dict(self, topic: ApiTopic, message: dict) -> bool:
        """Publish a JSON-serializable dict under *topic*; True if the message was accepted for sending."""
        ...
194
+
195
+
196
class ApiCommandRequestService(Protocol):
    """
    Minimum requirements to fulfill the API service command provider interface. Implementation details are left to the
    implementation.

    Implementations are required to be able to receive command requests from one or more clients, deliver those requests
    to a registered handler, and provide an immediate response to the requester. The response is a response to the
    _command request_ not necessarily the response to the command itself. See CommandRequestDelegate for additional
    details.
    """

    @property
    def command_request_delegate(self) -> Optional[CommandRequestDelegate]:
        """The handler invoked for each received command request, or None if unset."""
        ...

    @command_request_delegate.setter
    def command_request_delegate(self, value: Optional[CommandRequestDelegate]): ...
211
+
212
+
213
@dataclass
class HeartbeatMessage:
    """Payload published periodically on the HEARTBEAT topic."""
    # Configured service identifier (RpcOptions.identifier).
    identifier: str
    # API version string.
    version: str
    # POSIX timestamp of the most recent heartbeat; updated just before publishing.
    timestamp: float
218
+
219
+
220
class RpcService(ApiMessageQueueService, ApiCommandRequestService):
    """
    Transport-agnostic base implementation of the API service.

    Runs a single worker thread that polls for incoming command requests, drains a queue of
    deferred publish actions, and periodically publishes heartbeat messages. Subclasses
    implement the transport by overriding the underscore-prefixed hooks (_start, _stop,
    _send, _send_string, _send_dict, _get_next_command_request, _send_command_response).
    """

    def __init__(self, options: RpcOptions):
        self._subscriber_port = options.subscriber_port
        self._command_port = options.command_port
        self._identifier = options.identifier
        self._heartbeat_interval = options.heartbeat_interval
        self._heartbeat: HeartbeatMessage = HeartbeatMessage(
            identifier=options.identifier,
            version="0.9.0",
            timestamp=0.0
        )

        self._command_callback: Optional[CommandRequestDelegate] = None

        self._thread: Optional[Thread] = None

        self._termination_requested = False

        # Initialized here so _cancel_heartbeat() is safe even when _run() fails before
        # the first heartbeat is ever scheduled (previously an AttributeError).
        self._heartbeat_timer: Optional[Timer] = None

        # Deferred publish actions, executed on the worker thread.
        self._queue = Queue()

    @property
    def subscriber_port(self) -> int:
        """Port configured for the publisher socket."""
        return self._subscriber_port

    @property
    def command_port(self) -> int:
        """Port configured for the command socket."""
        return self._command_port

    @property
    def identifier(self) -> str:
        """Identifier published in heartbeat messages."""
        return self._identifier

    @property
    def heartbeat_interval(self) -> int:
        """Seconds between heartbeat publications."""
        return self._heartbeat_interval

    @property
    def command_request_delegate(self) -> Optional[CommandRequestDelegate]:
        return self._command_callback

    @command_request_delegate.setter
    def command_request_delegate(self, value: Optional[CommandRequestDelegate]):
        self._command_callback = value

    def start(self):
        """Start the worker thread if it is not already running."""
        if self._thread is None or not self._thread.is_alive():
            self._termination_requested = False
            self._queue = Queue()
            self._thread = Thread(target=self._run)
            self._thread.start()

    def stop(self):
        """Request termination of the worker thread (does not join it)."""
        self._termination_requested = True

    def send(self, topic: ApiTopic, data: bytes) -> bool:
        """Queue raw bytes for publication on the worker thread."""
        self._queue.put(lambda: self._send(topic, data))
        return True

    def send_string(self, topic: ApiTopic, message: str) -> bool:
        """Queue a text message for publication on the worker thread."""
        self._queue.put(lambda: self._send_string(topic, message))
        return True

    def send_dict(self, topic: ApiTopic, message: dict) -> bool:
        """Queue a dict for JSON publication; returns False if queuing itself fails."""
        try:
            self._queue.put(lambda: self._send_dict(topic, message))
            return True
        except Exception:
            # Narrowed from a bare 'except:' so KeyboardInterrupt/SystemExit propagate.
            logger.exception("failed to queue message for publication")

        return False

    def _run(self):
        """Worker loop: start the transport, schedule heartbeats, process requests and queued sends."""
        try:
            if not self._start():
                logger.error("failed to start api service")
                self._update_after_run()
                return
        except Exception:
            logger.exception("exception starting api service")
            self._update_after_run()
            return

        self._queue_heartbeat()

        while not self._termination_requested:
            try:
                # Expected to be non-blocking.
                request = self._get_next_command_request()

                if request is not None:
                    # Expected to happen fast. Most message queue implementations that provide a request/response-type
                    # pattern require that a response be sent before the next request is received. And the associated
                    # client/caller implementation requires the response before accepting another request.
                    #
                    # If this becomes an untenable requirement, a different pattern will be required for command
                    # requests (and the underlying implementations updated). However, anything that allows interleaving
                    # multiple requests from the same client with responses would effectively be the same as this
                    # pattern where there is an immediate _request_ response and a delayed _command_ response through
                    # the message queue.
                    #
                    # NOTE: There is no inherent limitation in this pattern with multiple requests from multiple
                    # clients. This is related to the handling of each individual client.

                    if self._command_callback is not None:
                        try:
                            # Can not assume the registered delegate will be well-behaved.
                            self._send_command_response(self._command_callback(request))
                        except Exception as e:
                            self._send_command_response(
                                ApiCommandRequestResponse.for_exception(request.command, request.nonce, e))
                    else:
                        # Must provide a response, even if no one that registered this service cares (using for the
                        # message queue only, etc.).
                        self._send_command_response(
                            ApiCommandRequestResponse(command=request.command, nonce=request.nonce,
                                                      result=ApiCommandReqeustResult.UNAVAILABLE))

                # Blocks for at most 50 ms waiting for a deferred publish action.
                action = self._queue.get(timeout=0.05)

                # This is performed by the implementation. It should be exception safe or the implementation should be
                # updated.
                action()

            except Empty:
                # Expected from the queue get() when nothing is pending.
                pass

            time.sleep(0.1)

        self._update_after_run()

    def _update_after_run(self):
        """Tear down after the worker loop exits (or fails to start)."""
        self._cancel_heartbeat()

        self._stop()

        self._thread = None

    def _queue_heartbeat(self):
        """Schedule the next heartbeat publication."""
        self._heartbeat_timer = Timer(self._heartbeat_interval, self._heartbeat_timer_callback)
        self._heartbeat_timer.start()

    def _cancel_heartbeat(self):
        """Cancel any pending heartbeat timer."""
        if self._heartbeat_timer is not None:
            self._heartbeat_timer.cancel()
            self._heartbeat_timer = None

    def _heartbeat_timer_callback(self):
        # Sending must happen on the queue-processing thread — transport sockets are not
        # assumed to be safe to use from the Timer thread — so the send is enqueued
        # rather than performed directly here.
        if not self._termination_requested:
            self._heartbeat.timestamp = time.time()
            message = asdict(self._heartbeat)
            self._queue.put(lambda: self._send_dict(ApiTopic.HEARTBEAT, message))
            self._queue_heartbeat()

    def _start(self) -> bool:
        """Transport hook: open/bind resources; return True on success."""
        raise NotImplementedError("Subclasses must implement _start()")

    def _stop(self):
        """Transport hook: release resources opened by _start()."""
        raise NotImplementedError("Subclasses must implement _stop()")

    def _send(self, topic: ApiTopic, data: bytes) -> bool:
        """Transport hook: publish raw bytes. Default implementation does nothing."""
        return False

    def _send_string(self, topic: ApiTopic, message: str) -> bool:
        """Transport hook: publish a text message. Default implementation does nothing."""
        return False

    def _send_dict(self, topic: ApiTopic, message: dict) -> bool:
        """Transport hook: publish a dict as JSON. Default implementation does nothing."""
        return False

    def _get_next_command_request(self) -> Optional[ApiCommandRequest]:
        """
        If a command request is returned, the subclass/implementation can assume that a response will be sent
        (via the `_send_command_response()` method). If something is received by the implementation that can not be
        returned as a valid command request (malformed, incomplete, etc.), it is the responsibility of the
        implementation to provide a response if that is required by the particular implementation (e.g., request/reply
        requiring a response to every request, etc.).

        :return: a command request if available, None otherwise
        """
        return None

    def _send_command_response(self, response: ApiCommandRequestResponse):
        """Transport hook: deliver the immediate response to the most recent request."""
        pass
@@ -0,0 +1 @@
1
+ from .open_telemetry_service import configure_telemetry
@@ -0,0 +1,51 @@
1
+ import importlib.util
2
+ import logging
3
+ import os
4
+ from typing import Optional
5
+
6
+ from ..api_options import TelemetryOptions
7
+
8
+ _spec_opentelemetry = importlib.util.find_spec("opentelemetry")
9
+
10
+ logger = logging.getLogger(__name__)
11
+
12
+
13
def configure_telemetry(options: Optional[TelemetryOptions]) -> bool:
    """
    Configure global OpenTelemetry trace and metric providers for OTLP/HTTP export.

    Returns True only when telemetry was fully configured; False when options are missing
    or disabled, when the optional 'opentelemetry' dependency is not installed, or when no
    endpoint is available (options.endpoint, falling back to the
    OTEL_EXPORTER_OTLP_ENDPOINT environment variable).

    NOTE(review): options.api_key is never used here — confirm whether the exporters
    should be sending an authentication header.
    """
    if options is None or not options.enable:
        logger.debug(f"telemetry options {'missing' if options is None else 'disabled'}.")
        return False

    if _spec_opentelemetry is None:
        logger.warning("telemetry enabled however a required dependency is missing.")
        return False

    endpoint = options.endpoint if options.endpoint is not None else os.environ.get("OTEL_EXPORTER_OTLP_ENDPOINT")

    if endpoint is None:
        logger.warning("telemetry enabled however the endpoint is not specified.")
        return False

    # Imported lazily: these modules exist only when the optional 'telemetry' extra is installed,
    # and the guard above has already verified the package is importable.
    from opentelemetry.sdk.resources import SERVICE_NAME, Resource

    from opentelemetry import trace
    from opentelemetry.sdk.trace import TracerProvider
    from opentelemetry.exporter.otlp.proto.http.trace_exporter import OTLPSpanExporter
    from opentelemetry.sdk.trace.export import BatchSpanProcessor

    from opentelemetry import metrics
    from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
    from opentelemetry.sdk.metrics import MeterProvider
    from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader

    resource = Resource(attributes={SERVICE_NAME: "auto-trainer"})

    # Traces: batched OTLP/HTTP export to <endpoint>/v1/traces.
    trace_provider = TracerProvider(resource=resource)
    processor = BatchSpanProcessor(OTLPSpanExporter(endpoint=f"{endpoint}/v1/traces"))
    trace_provider.add_span_processor(processor)
    trace.set_tracer_provider(trace_provider)

    # Metrics: periodic OTLP/HTTP export to <endpoint>/v1/metrics.
    reader = PeriodicExportingMetricReader(OTLPMetricExporter(endpoint=f"{endpoint}/v1/metrics"))
    meter_provider = MeterProvider(resource=resource, metric_readers=[reader])
    metrics.set_meter_provider(meter_provider)

    return True
@@ -0,0 +1 @@
1
+ from .pub_sub_server import run_server
@@ -0,0 +1,39 @@
1
+ """
2
+ Create an instance of an Api Service implementation to receive published messages and send command requests.
3
+ """
4
+
5
+ import logging
6
+
7
+ from ..rpc_service import ApiCommandRequest, ApiCommandRequestResponse, ApiCommandReqeustResult
8
+ from ..zeromq import ZeroMQApiService
9
+ from ..api_options import create_default_api_options
10
+
11
# NOTE: configuring root logging at import time is a side effect; presumably acceptable
# because this module is a development/testing tool rather than library code — confirm.
logging.basicConfig(level=logging.DEBUG, format="%(asctime)s %(levelname)s\t [%(name)s] %(message)s")
logging.getLogger('autotrainer').setLevel(logging.DEBUG)
logging.getLogger('tools').setLevel(logging.DEBUG)

logger = logging.getLogger(__name__)
16
+
17
+
18
def _respond_to_command_request(request: ApiCommandRequest) -> ApiCommandRequestResponse:
    """Dev/test stub: acknowledge every command request with a SUCCESS response."""
    logger.debug("Received command request: %s", request.command)
    # The request's nonce must be echoed back so the client can correlate the response;
    # ApiCommandRequestResponse also has no default for it, so omitting it (as before)
    # raised a TypeError on every handled command.
    return ApiCommandRequestResponse(command=request.command, nonce=request.nonce,
                                     data={"seen": True},
                                     result=ApiCommandReqeustResult.SUCCESS)
22
+
23
+
24
def run_server():
    """
    Run a development ZeroMQ API service until the user presses enter.

    The service publishes heartbeat messages and answers command requests via
    _respond_to_command_request.
    """
    options = create_default_api_options()

    service = ZeroMQApiService(options.rpc)

    service.command_request_delegate = _respond_to_command_request

    service.start()

    try:
        input("Press enter to stop the service...\n")
    finally:
        # Always stop the worker thread: a Ctrl-C/EOF during input() would otherwise
        # leave the non-daemon service thread running and keep the process alive.
        service.stop()
36
+
37
+
38
if __name__ == '__main__':
    # Allow running this module directly as a standalone development server.
    run_server()
@@ -0,0 +1,40 @@
1
+ import socket
2
+ from datetime import datetime
3
+ from json import JSONEncoder
4
+ from uuid import UUID
5
+
6
+ _IS_JSON_ENCODER_PATCHED = False
7
+
8
+
9
def get_ip4_addr_str() -> str:
    """Return this host's primary IPv4 address, or "127.0.0.1" if it cannot be determined."""
    # gethostname()/gethostbyname() and associated IP lookups have proven unreliable on
    # deployed devices where the configuration is not perfect. Connecting a UDP socket
    # toward a public address (Google DNS) asks the OS which local interface it would
    # route through; this assumes access to the internet, which has its own limitations.
    # A more complex implementation would be needed to manage all conditions while still
    # avoiding a 127.0.0.1 result when an actual address is available.
    with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as probe:
        try:
            probe.connect(("8.8.8.8", 80))
            return probe.getsockname()[0]
        except Exception:
            return "127.0.0.1"
24
+
25
+
26
def patch_uuid_encoder():
    """
    Globally patch json.JSONEncoder so UUID and datetime values are serializable.

    Idempotent: the patch is applied at most once per process.
    """
    global _IS_JSON_ENCODER_PATCHED

    if not _IS_JSON_ENCODER_PATCHED:
        # Delegate unknown types to the saved original hook instead of installing
        # UUIDEncoder.default directly: that method's zero-argument super() is bound
        # to UUIDEncoder and raises TypeError when invoked on a plain JSONEncoder.
        original_default = JSONEncoder.default

        def _patched_default(self, obj):
            if isinstance(obj, UUID):
                return str(obj)
            if isinstance(obj, datetime):
                return obj.timestamp()
            return original_default(self, obj)

        JSONEncoder.default = _patched_default
        _IS_JSON_ENCODER_PATCHED = True


class UUIDEncoder(JSONEncoder):
    """JSONEncoder that serializes UUIDs as strings and datetimes as POSIX timestamps."""

    def default(self, obj):
        if isinstance(obj, UUID):
            return str(obj)
        if isinstance(obj, datetime):
            return obj.timestamp()
        return super().default(obj)
@@ -0,0 +1 @@
1
+ from .zeromq_api_service import ZeroMQApiService
@@ -0,0 +1,123 @@
1
+ import json
2
+ import logging
3
+ from typing import Optional
4
+
5
+ import zmq
6
+ import humps
7
+
8
+ from ..rpc_service import RpcService, RpcOptions, ApiTopic, ApiCommandRequestResponse, ApiCommandRequest, ApiCommand
9
+ from ..util import get_ip4_addr_str, UUIDEncoder
10
+
11
+ logger = logging.getLogger(__name__)
12
+
13
+
14
class ZeroMQApiService(RpcService):
    """
    RpcService implementation backed by ZeroMQ: a PUB socket for published messages and a
    REP socket for command request/response handling.
    """

    def __init__(self, options: RpcOptions):
        super().__init__(options)

        ips = [get_ip4_addr_str()]

        # Also bind loopback so local clients can connect even when a LAN address was found.
        if ips[0] != "127.0.0.1":
            ips.append("127.0.0.1")

        self._pub_addresses = [f"tcp://{ip}:{self.subscriber_port}" for ip in ips]
        self._pub_socket = None

        self._cmd_addresses = [f"tcp://{ip}:{self.command_port}" for ip in ips]
        self._cmd_socket = None

        # Single context owning both sockets; terminated in _stop().
        self._context = None

        # REQ/REP requires strict alternation: exactly one response must be sent for each
        # received request before the next request can be received.
        self._response_pending = False

    def _start(self) -> bool:
        """Create the ZMQ context and bind the PUB and REP sockets. Idempotent."""
        if self._pub_socket is not None:
            return True

        # One context is sufficient for both sockets (two contexts were previously
        # created and leaked).
        self._context = zmq.Context()

        self._pub_socket = self._context.socket(zmq.PUB)
        for address in self._pub_addresses:
            self._pub_socket.bind(address)
            logger.debug(f"ZMQ PUB socket bound to {address}")

        self._cmd_socket = self._context.socket(zmq.REP)
        for address in self._cmd_addresses:
            self._cmd_socket.bind(address)
            logger.debug(f"ZMQ REP socket bound to {address}")

        return True

    def _stop(self):
        """Unbind and close both sockets and terminate the context."""
        # Bound endpoints must be unbound — disconnect() applies to connected endpoints
        # and fails for bound ones — and the sockets closed so descriptors are released.
        if self._pub_socket is not None:
            for address in self._pub_addresses:
                try:
                    self._pub_socket.unbind(address)
                except zmq.ZMQError as ex:
                    logger.debug(ex)
            self._pub_socket.close()
            self._pub_socket = None
        if self._cmd_socket is not None:
            for address in self._cmd_addresses:
                try:
                    self._cmd_socket.unbind(address)
                except zmq.ZMQError as ex:
                    logger.debug(ex)
            self._cmd_socket.close()
            self._cmd_socket = None
        if self._context is not None:
            self._context.term()
            self._context = None

    def _send(self, topic: ApiTopic, data: bytes) -> bool:
        """Publish raw bytes as a two-frame message: 4-byte little-endian topic, then payload."""
        if self._pub_socket is None:
            return False
        self._pub_socket.send(topic.to_bytes(4, "little"), flags=zmq.SNDMORE)
        self._pub_socket.send(data)
        return True

    def _send_string(self, topic: ApiTopic, message: str) -> bool:
        """Publish a text message, UTF-8 encoded, under *topic*."""
        if self._pub_socket is None:
            return False
        self._pub_socket.send(topic.to_bytes(4, "little"), flags=zmq.SNDMORE)
        self._pub_socket.send(message.encode("utf8"))
        return True

    def _send_dict(self, topic: ApiTopic, message: dict) -> bool:
        """Publish a dict as camelCase JSON under *topic*."""
        if self._pub_socket is None:
            return False
        try:
            # The dict is serialized to a JSON string here, so it must be sent as plain
            # bytes: passing it to send_json() would JSON-encode it a second time and
            # subscribers would receive a quoted string instead of an object.
            json_data = humps.camelize(json.dumps(message, cls=UUIDEncoder))
            self._pub_socket.send(topic.to_bytes(4, "little"), flags=zmq.SNDMORE)
            self._pub_socket.send(json_data.encode("utf8"))
            return True
        except Exception as ex:
            logger.error(ex)
            return False

    def _get_next_command_request(self) -> Optional[ApiCommandRequest]:
        """Non-blocking poll of the REP socket for the next parseable command request."""
        if self._cmd_socket is not None and not self._response_pending:
            try:
                message = self._cmd_socket.recv(flags=zmq.NOBLOCK)

                self._response_pending = True

                request = ZeroMQApiService._parse_command_request(message)

                if request is None:
                    # If a request was received, but could not be parsed, the requester is still expecting a response.
                    # Otherwise, the ZeroMQ socket on both ends will be in a lingering state. Send it ourselves since
                    # returning None will not generate a response by the caller.
                    self._send_command_response(ApiCommandRequestResponse(command=ApiCommand.NONE, nonce=0))

                # A response will be sent by the caller if a request is returned.
                return request

            except zmq.Again:
                # Expected from a NOBLOCK receive when no request is waiting.
                pass

        return None

    def _send_command_response(self, response: ApiCommandRequestResponse):
        """Send the reply for the most recently received request and clear the pending flag."""
        if self._cmd_socket is not None:
            # This is guaranteed to return a valid response, even if modified due to any errors in serialization.
            data = response.as_bytes(True)

            self._cmd_socket.send(data)

            self._response_pending = False

    @staticmethod
    def _parse_command_request(message: bytes) -> Optional[ApiCommandRequest]:
        """Parse raw bytes into a request; returns None (and logs) on any parse failure."""
        try:
            return ApiCommandRequest.parse_bytes(message)
        except json.decoder.JSONDecodeError as ex:
            # Malformed JSON from a client is expected occasionally; log and drop.
            logger.error(ex)
        except Exception as ex:
            logger.error(ex)

        return None