matrice-inference 0.1.0__py3-none-manylinux_2_17_x86_64.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects changes between package versions as they appear in their public registries.

Potentially problematic release: this version of matrice-inference might be problematic.

Files changed (80)
  1. matrice_inference/deploy/aggregator/aggregator.cpython-312-x86_64-linux-gnu.so +0 -0
  2. matrice_inference/deploy/aggregator/aggregator.pyi +55 -0
  3. matrice_inference/deploy/aggregator/analytics.cpython-312-x86_64-linux-gnu.so +0 -0
  4. matrice_inference/deploy/aggregator/analytics.pyi +63 -0
  5. matrice_inference/deploy/aggregator/ingestor.cpython-312-x86_64-linux-gnu.so +0 -0
  6. matrice_inference/deploy/aggregator/ingestor.pyi +79 -0
  7. matrice_inference/deploy/aggregator/pipeline.cpython-312-x86_64-linux-gnu.so +0 -0
  8. matrice_inference/deploy/aggregator/pipeline.pyi +139 -0
  9. matrice_inference/deploy/aggregator/publisher.cpython-312-x86_64-linux-gnu.so +0 -0
  10. matrice_inference/deploy/aggregator/publisher.pyi +59 -0
  11. matrice_inference/deploy/aggregator/synchronizer.cpython-312-x86_64-linux-gnu.so +0 -0
  12. matrice_inference/deploy/aggregator/synchronizer.pyi +58 -0
  13. matrice_inference/deploy/client/auto_streaming/auto_streaming.cpython-312-x86_64-linux-gnu.so +0 -0
  14. matrice_inference/deploy/client/auto_streaming/auto_streaming.pyi +145 -0
  15. matrice_inference/deploy/client/auto_streaming/auto_streaming_utils.cpython-312-x86_64-linux-gnu.so +0 -0
  16. matrice_inference/deploy/client/auto_streaming/auto_streaming_utils.pyi +126 -0
  17. matrice_inference/deploy/client/client.cpython-312-x86_64-linux-gnu.so +0 -0
  18. matrice_inference/deploy/client/client.pyi +337 -0
  19. matrice_inference/deploy/client/client_stream_utils.cpython-312-x86_64-linux-gnu.so +0 -0
  20. matrice_inference/deploy/client/client_stream_utils.pyi +83 -0
  21. matrice_inference/deploy/client/client_utils.cpython-312-x86_64-linux-gnu.so +0 -0
  22. matrice_inference/deploy/client/client_utils.pyi +77 -0
  23. matrice_inference/deploy/client/streaming_gateway/streaming_gateway.cpython-312-x86_64-linux-gnu.so +0 -0
  24. matrice_inference/deploy/client/streaming_gateway/streaming_gateway.pyi +120 -0
  25. matrice_inference/deploy/client/streaming_gateway/streaming_gateway_utils.cpython-312-x86_64-linux-gnu.so +0 -0
  26. matrice_inference/deploy/client/streaming_gateway/streaming_gateway_utils.pyi +442 -0
  27. matrice_inference/deploy/client/streaming_gateway/streaming_results_handler.cpython-312-x86_64-linux-gnu.so +0 -0
  28. matrice_inference/deploy/client/streaming_gateway/streaming_results_handler.pyi +19 -0
  29. matrice_inference/deploy/optimize/cache_manager.cpython-312-x86_64-linux-gnu.so +0 -0
  30. matrice_inference/deploy/optimize/cache_manager.pyi +15 -0
  31. matrice_inference/deploy/optimize/frame_comparators.cpython-312-x86_64-linux-gnu.so +0 -0
  32. matrice_inference/deploy/optimize/frame_comparators.pyi +203 -0
  33. matrice_inference/deploy/optimize/frame_difference.cpython-312-x86_64-linux-gnu.so +0 -0
  34. matrice_inference/deploy/optimize/frame_difference.pyi +165 -0
  35. matrice_inference/deploy/optimize/transmission.cpython-312-x86_64-linux-gnu.so +0 -0
  36. matrice_inference/deploy/optimize/transmission.pyi +97 -0
  37. matrice_inference/deploy/server/inference/batch_manager.cpython-312-x86_64-linux-gnu.so +0 -0
  38. matrice_inference/deploy/server/inference/batch_manager.pyi +50 -0
  39. matrice_inference/deploy/server/inference/inference_interface.cpython-312-x86_64-linux-gnu.so +0 -0
  40. matrice_inference/deploy/server/inference/inference_interface.pyi +114 -0
  41. matrice_inference/deploy/server/inference/model_manager.cpython-312-x86_64-linux-gnu.so +0 -0
  42. matrice_inference/deploy/server/inference/model_manager.pyi +80 -0
  43. matrice_inference/deploy/server/inference/triton_utils.cpython-312-x86_64-linux-gnu.so +0 -0
  44. matrice_inference/deploy/server/inference/triton_utils.pyi +115 -0
  45. matrice_inference/deploy/server/proxy/proxy_interface.cpython-312-x86_64-linux-gnu.so +0 -0
  46. matrice_inference/deploy/server/proxy/proxy_interface.pyi +90 -0
  47. matrice_inference/deploy/server/proxy/proxy_utils.cpython-312-x86_64-linux-gnu.so +0 -0
  48. matrice_inference/deploy/server/proxy/proxy_utils.pyi +113 -0
  49. matrice_inference/deploy/server/server.cpython-312-x86_64-linux-gnu.so +0 -0
  50. matrice_inference/deploy/server/server.pyi +155 -0
  51. matrice_inference/deploy/server/stream/inference_worker.cpython-312-x86_64-linux-gnu.so +0 -0
  52. matrice_inference/deploy/server/stream/inference_worker.pyi +56 -0
  53. matrice_inference/deploy/server/stream/kafka_consumer_worker.cpython-312-x86_64-linux-gnu.so +0 -0
  54. matrice_inference/deploy/server/stream/kafka_consumer_worker.pyi +51 -0
  55. matrice_inference/deploy/server/stream/kafka_producer_worker.cpython-312-x86_64-linux-gnu.so +0 -0
  56. matrice_inference/deploy/server/stream/kafka_producer_worker.pyi +50 -0
  57. matrice_inference/deploy/server/stream/stream_debug_logger.cpython-312-x86_64-linux-gnu.so +0 -0
  58. matrice_inference/deploy/server/stream/stream_debug_logger.pyi +47 -0
  59. matrice_inference/deploy/server/stream/stream_manager.cpython-312-x86_64-linux-gnu.so +0 -0
  60. matrice_inference/deploy/server/stream/stream_manager.pyi +69 -0
  61. matrice_inference/deploy/server/stream/video_buffer.cpython-312-x86_64-linux-gnu.so +0 -0
  62. matrice_inference/deploy/server/stream/video_buffer.pyi +120 -0
  63. matrice_inference/deploy/stream/kafka_stream.cpython-312-x86_64-linux-gnu.so +0 -0
  64. matrice_inference/deploy/stream/kafka_stream.pyi +444 -0
  65. matrice_inference/deploy/stream/redis_stream.cpython-312-x86_64-linux-gnu.so +0 -0
  66. matrice_inference/deploy/stream/redis_stream.pyi +447 -0
  67. matrice_inference/deployment/camera_manager.cpython-312-x86_64-linux-gnu.so +0 -0
  68. matrice_inference/deployment/camera_manager.pyi +669 -0
  69. matrice_inference/deployment/deployment.cpython-312-x86_64-linux-gnu.so +0 -0
  70. matrice_inference/deployment/deployment.pyi +736 -0
  71. matrice_inference/deployment/inference_pipeline.cpython-312-x86_64-linux-gnu.so +0 -0
  72. matrice_inference/deployment/inference_pipeline.pyi +527 -0
  73. matrice_inference/deployment/streaming_gateway_manager.cpython-312-x86_64-linux-gnu.so +0 -0
  74. matrice_inference/deployment/streaming_gateway_manager.pyi +275 -0
  75. matrice_inference/py.typed +0 -0
  76. matrice_inference-0.1.0.dist-info/METADATA +26 -0
  77. matrice_inference-0.1.0.dist-info/RECORD +80 -0
  78. matrice_inference-0.1.0.dist-info/WHEEL +5 -0
  79. matrice_inference-0.1.0.dist-info/licenses/LICENSE.txt +21 -0
  80. matrice_inference-0.1.0.dist-info/top_level.txt +1 -0
matrice_inference/deploy/stream/kafka_stream.pyi
@@ -0,0 +1,444 @@
+ """Auto-generated stub for module: kafka_stream."""
+ from typing import Any, Dict, List, Optional, Set, Tuple, Union
+
+ from aiokafka import AIOKafkaProducer, AIOKafkaConsumer
+ from aiokafka.consumer.subscription_state import ConsumerRebalanceListener
+ from aiokafka.errors import KafkaError
+ from collections import deque
+ from confluent_kafka import Consumer, Producer, KafkaError, TopicPartition, OFFSET_INVALID
+ from confluent_kafka.admin import AdminClient, NewTopic
+ from datetime import datetime, timezone
+ import asyncio
+ import json
+ import logging
+ import threading
+ import time
+ import uuid
+
+ # Classes
+ class AsyncKafkaUtils:
+     """
+     Utility class for asynchronous Kafka operations.
+     """
+
+     def __init__(self: Any, bootstrap_servers: str, sasl_mechanism: Optional[str] = 'SCRAM-SHA-256', sasl_username: Optional[str] = 'matrice-sdk-user', sasl_password: Optional[str] = 'matrice-sdk-password', security_protocol: str = 'SASL_PLAINTEXT') -> None: ...
+     """
+     Initialize async Kafka utils with bootstrap servers and SASL configuration.
+
+     Args:
+         bootstrap_servers: Comma-separated list of Kafka broker addresses
+         sasl_mechanism: SASL mechanism for authentication
+         sasl_username: Username for SASL authentication
+         sasl_password: Password for SASL authentication
+         security_protocol: Security protocol for Kafka connection
+     """
+
+     async def close(self: Any) -> None: ...
+     """
+     Close async Kafka producer and consumer connections.
+     """
+
+     def configure_metrics_reporting(self: Any, rpc_client: Any, service_id: str = None, interval: int = 60, batch_size: int = 1000) -> None: ...
+     """
+     Configure background metrics reporting to backend API.
+
+     Args:
+         rpc_client: RPC client instance for API communication
+         service_id: Service identifier for metrics context
+         interval: Reporting interval in seconds (default: 60)
+         batch_size: Maximum metrics per batch (default: 1000)
+     """
+
+     async def consume_message(self: Any, timeout: float = 60.0) -> Optional[Dict]: ...
+     """
+     Consume a single message from Kafka.
+
+     Args:
+         timeout: Maximum time to wait for message in seconds
+
+     Returns:
+         Message dictionary if available, None if no message received
+
+     Raises:
+         RuntimeError: If consumer is not initialized
+         AsyncKafkaError: If message consumption fails
+     """
+
+     def get_metrics(self: Any, clear_after_read: bool = False) -> List[Dict]: ...
+     """
+     Get collected metrics for aggregation and reporting.
+
+     Args:
+         clear_after_read: Whether to clear metrics after reading
+
+     Returns:
+         List of metric dictionaries
+     """
+
+     async def produce_message(self: Any, topic: str, value: Union[dict, str, bytes, Any], key: Optional[Union[str, bytes, Any]] = None, headers: Optional[List[Tuple[str, bytes]]] = None, timeout: float = 30.0) -> None: ...
+     """
+     Produce a message to a Kafka topic.
+
+     Args:
+         topic: Topic to produce to
+         value: Message value (dict will be converted to JSON)
+         key: Optional message key
+         headers: Optional message headers
+         timeout: Maximum time to wait for message delivery in seconds
+
+     Raises:
+         RuntimeError: If producer is not initialized
+         ValueError: If topic or value is invalid
+         AsyncKafkaError: If message production fails
+     """
+
+     async def setup_consumer(self: Any, topics: List[str], group_id: str, group_instance_id: str = None, config: Optional[Dict] = None) -> None: ...
+     """
+     Set up async Kafka consumer.
+
+     Args:
+         topics: List of topics to subscribe to
+         group_id: Consumer group ID
+         group_instance_id: Consumer group instance ID for static membership
+         config: Additional consumer configuration
+
+     Raises:
+         ValueError: If topics list is empty
+         AsyncKafkaError: If consumer initialization fails
+     """
+
+     async def setup_producer(self: Any, config: Optional[Dict] = None) -> None: ...
+     """
+     Set up async Kafka producer.
+
+     Args:
+         config: Additional producer configuration
+
+     Raises:
+         AsyncKafkaError: If producer initialization fails
+     """
+
+     def stop_metrics_reporting(self: Any) -> None: ...
+     """
+     Stop the background metrics reporting thread (async version).
+     """
+
+ class KafkaUtils:
+     """
+     Utility class for synchronous Kafka operations.
+     """
+
+     def __init__(self: Any, bootstrap_servers: str, sasl_mechanism: Optional[str] = 'SCRAM-SHA-256', sasl_username: Optional[str] = 'matrice-sdk-user', sasl_password: Optional[str] = 'matrice-sdk-password', security_protocol: str = 'SASL_PLAINTEXT') -> None: ...
+     """
+     Initialize Kafka utils with bootstrap servers and SASL configuration.
+
+     Args:
+         bootstrap_servers: Comma-separated list of Kafka broker addresses
+         sasl_mechanism: SASL mechanism for authentication
+         sasl_username: Username for SASL authentication
+         sasl_password: Password for SASL authentication
+         security_protocol: Security protocol for Kafka connection
+     """
+
+     def close(self: Any) -> None: ...
+     """
+     Close Kafka producer and consumer connections.
+     """
+
+     def configure_metrics_reporting(self: Any, rpc_client: Any, service_id: str = None, interval: int = 60, batch_size: int = 1000) -> None: ...
+     """
+     Configure background metrics reporting to backend API.
+
+     Args:
+         rpc_client: RPC client instance for API communication
+         service_id: Service identifier for metrics context
+         interval: Reporting interval in seconds (default: 60)
+         batch_size: Maximum metrics per batch (default: 1000)
+     """
+
+     def consume_message(self: Any, timeout: float = 1.0) -> Optional[Dict]: ...
+     """
+     Consume single message from subscribed topics.
+
+     Args:
+         timeout: Maximum time to block waiting for message in seconds
+
+     Returns:
+         Message dict if available, None if timeout. Dict contains:
+             - topic: Topic name
+             - partition: Partition number
+             - offset: Message offset
+             - key: Message key (if present)
+             - value: Message value
+             - headers: Message headers (if present)
+             - timestamp: Message timestamp
+
+     Raises:
+         RuntimeError: If consumer is not set up
+         KafkaError: If message consumption fails
+     """
+
+     def create_topic_dynamic(self: Any, topic: str, partitions: int, replication: int, kafka_ip: str = None, kafka_port: str = None) -> bool: ...
+     """
+     Create a Kafka topic dynamically - equivalent to Go CreateTopic().
+
+     Args:
+         topic: Topic name to create
+         partitions: Number of partitions
+         replication: Replication factor
+         kafka_ip: Kafka server IP (optional, uses existing bootstrap_servers if None)
+         kafka_port: Kafka server port (optional, uses existing bootstrap_servers if None)
+
+     Returns:
+         bool: True if topic was created successfully, False otherwise
+     """
+
+     def get_consumer(self: Any, topic: str = None, group_id: str = None, ip: str = None, port: str = None) -> Optional[Consumer]: ...
+     """
+     Get existing consumer instance or create new one - equivalent to Go GetConsumer().
+
+     Args:
+         topic: Topic to subscribe to (optional if consumer already set up)
+         group_id: Consumer group ID (optional if consumer already set up)
+         ip: Kafka server IP (ignored if consumer already set up)
+         port: Kafka server port (ignored if consumer already set up)
+
+     Returns:
+         Consumer instance (existing self.consumer) or newly created consumer
+     """
+
+     def get_metrics(self: Any, clear_after_read: bool = False) -> List[Dict]: ...
+     """
+     Get collected metrics for aggregation and reporting.
+
+     Args:
+         clear_after_read: Whether to clear metrics after reading
+
+     Returns:
+         List of metric dictionaries
+     """
+
+     def produce_message(self: Any, topic: str, value: Union[dict, str, bytes, Any], key: Optional[Union[str, bytes, Any]] = None, headers: Optional[List[Tuple]] = None, timeout: float = 30.0, wait_for_delivery: bool = False) -> None: ...
+     """
+     Produce message to Kafka topic.
+
+     Args:
+         topic: Topic to produce to
+         value: Message value (dict will be converted to JSON)
+         key: Optional message key
+         headers: Optional list of (key, value) tuples for message headers
+         timeout: Maximum time to wait for message delivery in seconds
+         wait_for_delivery: Whether to wait for delivery confirmation
+
+     Raises:
+         RuntimeError: If producer is not set up
+         KafkaError: If message production fails
+         ValueError: If topic is empty or value is None
+     """
+
+     def publish_message_with_timestamp(self: Any, topic: str, key: Any, value: Any, ip: str = None, port: str = None) -> bool: ...
+     """
+     Publish message using Kafka message timestamp (no headers) - equivalent to Go Publish().
+
+     Args:
+         topic: Topic to publish to
+         key: Message key as bytes
+         value: Message value as bytes
+         ip: Kafka server IP (ignored if producer already set up)
+         port: Kafka server port (ignored if producer already set up)
+
+     Returns:
+         bool: True if message was published successfully, False otherwise
+     """
+
+     def read_consumer_with_latency(self: Any, consumer: Any = None, ip: str = None, port: str = None) -> Tuple[Optional[Dict], Optional[float], Optional[str]]: ...
+     """
+     Read message from consumer with latency calculation - equivalent to Go ReadConsumer().
+
+     Args:
+         consumer: Consumer instance to read from (uses self.consumer if None)
+         ip: Kafka server IP (ignored, for Go compatibility)
+         port: Kafka server port (ignored, for Go compatibility)
+
+     Returns:
+         Tuple of (message_dict, latency_seconds, error_string)
+     """
+
+     def setup_consumer(self: Any, topics: List[str], group_id: str, group_instance_id: str = None, config: Optional[Dict] = None) -> None: ...
+     """
+     Set up Kafka consumer for given topics.
+
+     Args:
+         topics: List of topics to subscribe to
+         group_id: Consumer group ID
+         group_instance_id: Consumer group instance ID for static membership
+         config: Additional consumer configuration
+
+     Raises:
+         KafkaError: If consumer initialization or subscription fails
+         ValueError: If topics list is empty
+     """
+
+     def setup_producer(self: Any, config: Optional[Dict] = None) -> None: ...
+     """
+     Set up Kafka producer with optional config.
+
+     Args:
+         config: Additional producer configuration
+
+     Raises:
+         KafkaError: If producer initialization fails
+     """
+
+     def stop_metrics_reporting(self: Any) -> None: ...
+     """
+     Stop the background metrics reporting thread.
+     """
+
+ class MatriceKafkaDeployment:
+     """
+     Class for managing Kafka deployments for Matrice streaming API.
+     """
+
+     def __init__(self: Any, session: Any, service_id: str, type: str, consumer_group_id: str = None, consumer_group_instance_id: str = None, sasl_mechanism: Optional[str] = 'SCRAM-SHA-256', sasl_username: Optional[str] = 'matrice-sdk-user', sasl_password: Optional[str] = 'matrice-sdk-password', security_protocol: str = 'SASL_PLAINTEXT', custom_request_service_id: str = None, custom_result_service_id: str = None, enable_metrics: bool = True, metrics_interval: int = 60) -> None: ...
+     """
+     Initialize Kafka deployment with deployment ID.
+
+     Args:
+         session: Session object for authentication and RPC
+         service_id: ID of the deployment/service (used as deployment_id for metrics)
+         type: Type of deployment ("client" or "server")
+         consumer_group_id: Kafka consumer group ID
+         consumer_group_instance_id: Kafka consumer group instance ID for static membership
+         sasl_mechanism: SASL mechanism for authentication
+         sasl_username: Username for SASL authentication
+         sasl_password: Password for SASL authentication
+         security_protocol: Security protocol for Kafka connection
+         custom_request_service_id: Custom request service ID
+         custom_result_service_id: Custom result service ID
+         enable_metrics: Enable metrics reporting
+         metrics_interval: Metrics reporting interval in seconds
+
+     Raises:
+         ValueError: If type is not "client" or "server"
+     """
+
+     async def async_consume_message(self: Any, timeout: float = 60.0) -> Optional[Dict]: ...
+     """
+     Consume a message from Kafka asynchronously.
+
+     Args:
+         timeout: Maximum time to wait for message in seconds
+
+     Returns:
+         Message dictionary if available, None if no message received
+
+     Raises:
+         RuntimeError: If consumer is not initialized
+         AsyncKafkaError: If message consumption fails
+     """
+
+     async def async_produce_message(self: Any, message: dict, timeout: float = 60.0, key: Optional[str] = None) -> None: ...
+     """
+     Produce a message to Kafka asynchronously.
+
+     Args:
+         message: Message to produce
+         timeout: Maximum time to wait for message delivery in seconds
+         key: Optional key for message partitioning (stream_id/camera_id)
+
+     Raises:
+         RuntimeError: If producer is not initialized or event loop is unavailable
+         ValueError: If message is invalid
+         AsyncKafkaError: If message production fails
+     """
+
+     def check_setup_success(self: Any) -> bool: ...
+     """
+     Check if the Kafka setup is successful and attempt to recover if not.
+
+     Returns:
+         bool: True if setup was successful, False otherwise
+     """
+
+     async def close(self: Any) -> None: ...
+     """
+     Close Kafka producer and consumer connections.
+
+     This method gracefully closes all Kafka connections without raising exceptions
+     to ensure proper cleanup during shutdown.
+     """
+
+     def configure_metrics_reporting(self: Any, interval: int = 60, batch_size: int = 1000) -> None: ...
+     """
+     Configure background metrics reporting for both sync and async Kafka utilities.
+
+     This method enables automatic metrics collection and reporting to the backend API
+     for all Kafka operations performed through this deployment.
+
+     Args:
+         interval: Reporting interval in seconds (default: 60)
+         batch_size: Maximum metrics per batch (default: 1000)
+     """
+
+     def consume_message(self: Any, timeout: float = 60.0) -> Optional[Dict]: ...
+     """
+     Consume a message from Kafka.
+
+     Args:
+         timeout: Maximum time to wait for message in seconds
+
+     Returns:
+         Message dictionary if available, None if no message received
+
+     Raises:
+         RuntimeError: If consumer is not initialized
+         KafkaError: If message consumption fails
+     """
+
+     def get_all_metrics(self: Any) -> Dict: ...
+     """
+     Get aggregated metrics from all Kafka utilities.
+
+     Returns:
+         Dict: Combined metrics from sync and async Kafka utilities
+     """
+
+     def get_kafka_info(self: Any) -> Any: ...
+     """
+     Get Kafka setup information from the API.
+
+     Returns:
+         Tuple containing (setup_success, bootstrap_server, request_topic, result_topic)
+
+     Raises:
+         ValueError: If API requests fail or return invalid data
+     """
+
+     def get_metrics_summary(self: Any) -> Dict: ...
+     """
+     Get a summary of metrics from all Kafka utilities.
+
+     Returns:
+         Dict: Summarized metrics with counts and statistics
+     """
+
+     def produce_message(self: Any, message: dict, timeout: float = 60.0, key: Optional[str] = None) -> None: ...
+     """
+     Produce a message to Kafka.
+
+     Args:
+         message: Message to produce
+         timeout: Maximum time to wait for message delivery in seconds
+         key: Optional key for message partitioning (stream_id/camera_id)
+
+     Raises:
+         RuntimeError: If producer is not initialized
+         ValueError: If message is invalid
+         KafkaError: If message production fails
+     """
+
+     def refresh(self: Any) -> Any: ...
+     """
+     Refresh the Kafka producer and consumer connections.
+     """
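
Taken together, the stubs describe a conventional setup/produce/consume surface. Below is a minimal sketch of the synchronous path, using only signatures from the stub; the broker address and the "orders" topic are placeholders, not part of the package, and the constructor defaults assume a SASL_PLAINTEXT broker with the bundled SCRAM credentials:

    from matrice_inference.deploy.stream.kafka_stream import KafkaUtils

    # Override the SASL defaults if your broker is unauthenticated.
    kafka = KafkaUtils(bootstrap_servers="localhost:9092")

    kafka.setup_producer()
    kafka.produce_message(
        topic="orders",                               # hypothetical topic
        value={"order_id": 42, "status": "created"},  # dicts are JSON-encoded
        key="camera-1",
        wait_for_delivery=True,
    )

    kafka.setup_consumer(topics=["orders"], group_id="demo-group")
    msg = kafka.consume_message(timeout=5.0)          # None on timeout
    if msg is not None:
        print(msg["topic"], msg["offset"], msg["value"])

    kafka.close()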
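AsyncKafkaUtils mirrors the same flow with awaitable setup and I/O calls. A sketch under the same assumptions, driven by asyncio:

    import asyncio

    from matrice_inference.deploy.stream.kafka_stream import AsyncKafkaUtils

    async def main() -> None:
        kafka = AsyncKafkaUtils(bootstrap_servers="localhost:9092")

        await kafka.setup_producer()
        await kafka.produce_message(
            topic="frames",        # hypothetical topic
            value={"frame_id": 1},
            key="camera-1",        # a stable key keeps one stream on one partition
        )

        await kafka.setup_consumer(topics=["frames"], group_id="demo-async-group")
        msg = await kafka.consume_message(timeout=10.0)  # None if nothing arrived
        if msg is not None:
            print(msg)

        await kafka.close()

    asyncio.run(main())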
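MatriceKafkaDeployment wraps both utilities behind a deployment-scoped API: per get_kafka_info(), it resolves the bootstrap server and request/result topics through the Matrice backend, so it needs an authenticated session object from the SDK, which this stub does not define. A hypothetical round trip, with session and the service ID as stand-ins you must supply:

    from matrice_inference.deploy.stream.kafka_stream import MatriceKafkaDeployment

    deployment = MatriceKafkaDeployment(
        session=session,              # from the SDK's own auth flow (not in this module)
        service_id="your-service-id", # placeholder deployment/service ID
        type="client",                # "client" or "server"
    )

    if deployment.check_setup_success():
        deployment.produce_message({"input": "frame-001"}, key="camera-1")
        result = deployment.consume_message(timeout=60.0)
        print(result)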