matrice 1.0.99244__py3-none-any.whl → 1.0.99245__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- matrice/deploy/aggregator/ingestor.py +28 -6
- matrice/deploy/aggregator/synchronizer.py +10 -1
- matrice/deploy/server/stream_worker.py +3 -1
- {matrice-1.0.99244.dist-info → matrice-1.0.99245.dist-info}/METADATA +1 -1
- {matrice-1.0.99244.dist-info → matrice-1.0.99245.dist-info}/RECORD +8 -8
- {matrice-1.0.99244.dist-info → matrice-1.0.99245.dist-info}/WHEEL +0 -0
- {matrice-1.0.99244.dist-info → matrice-1.0.99245.dist-info}/licenses/LICENSE.txt +0 -0
- {matrice-1.0.99244.dist-info → matrice-1.0.99245.dist-info}/top_level.txt +0 -0
matrice/deploy/aggregator/ingestor.py

```diff
@@ -46,8 +46,10 @@ class ResultsIngestor:
         self._is_streaming = False
         self._lock = threading.RLock()

-        # Counter for
-        self._counters: Dict[Tuple[str, str], itertools.count] = {}
+        # Counter for ordering within (deployment_id, stream_key, stream_group_key)
+        self._counters: Dict[Tuple[str, str, str], itertools.count] = {}
+        # Global per-deployment sequence for PriorityQueue tie-breaking across streams
+        self._queue_seq_counters: Dict[str, itertools.count] = {}

         # Track last seen input_order for reset detection
         self._last_input_order: Dict[Tuple[str, str], int] = {}
@@ -74,6 +76,8 @@ class ResultsIngestor:
         for deployment_id in self.deployment_ids:
             self.results_queues[deployment_id] = PriorityQueue()
             self.stats["queue_sizes"][deployment_id] = 0
+            # Initialize per-deployment sequence counter
+            self._queue_seq_counters[deployment_id] = itertools.count()


     def start_streaming(self) -> bool:
@@ -160,10 +164,10 @@ class ResultsIngestor:

     def _get_priority_counter(self, deployment_id: str, stream_key: str, stream_group_key: str) -> int:
         """
-        Get
+        Get a monotonically increasing counter per (deployment_id, stream_key, stream_group_key) for ordering.

         Returns:
-
+            int: Priority counter for ordering within the same stream
         """
         key = (deployment_id, stream_key, stream_group_key)

@@ -173,6 +177,16 @@ class ResultsIngestor:

         return next(self._counters[key])

+    def _get_queue_sequence(self, deployment_id: str) -> int:
+        """
+        Get a global per-deployment sequence number for tie-breaking across different streams
+        placed in the same PriorityQueue. This prevents Python from trying to compare dicts
+        when primary priorities are equal.
+        """
+        if deployment_id not in self._queue_seq_counters:
+            self._queue_seq_counters[deployment_id] = itertools.count()
+        return next(self._queue_seq_counters[deployment_id])
+
     def _stream_results_to_queue(
         self, deployment_id: str, results_queue: PriorityQueue
     ):
@@ -210,6 +224,7 @@ class ResultsIngestor:
                     continue

                 order = self._get_priority_counter(deployment_id, stream_key, stream_group_key)
+                seq = self._get_queue_sequence(deployment_id)
                 # Create enhanced result object with the structured response
                 enhanced_result = {
                     "deployment_id": deployment_id,
@@ -221,7 +236,8 @@ class ResultsIngestor:
                 }

                 try:
-
+                    # Include a sequence tie-breaker to avoid comparing dicts when priorities are equal
+                    results_queue.put((order, seq, enhanced_result), block=False)

                     with self._lock:
                         self.stats["results_consumed"] += 1
@@ -287,7 +303,13 @@ class ResultsIngestor:
                     deployment_id
                 ].qsize()

-
+            # Handle both 2-tuple (legacy) and 3-tuple (with seq) queue items
+            if isinstance(priority_result, tuple):
+                if len(priority_result) >= 3:
+                    return priority_result[2]
+                elif len(priority_result) == 2:
+                    return priority_result[1]
+            return priority_result
        except Empty:
            return None
        except Exception as exc:
```
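The comments added in this file spell out the motivation: `PriorityQueue` compares queued tuples element by element, so two items with the same primary order would fall through to comparing the result dicts and raise `TypeError`. Below is a minimal, self-contained sketch of that tie-breaking pattern outside the package (the sample dicts and variable names are illustrative, not taken from matrice):

```python
import itertools
from queue import PriorityQueue

# A monotonically increasing sequence per deployment: when two items share
# the same primary priority, the comparison stops at this integer and never
# reaches the dict payload.
seq_counter = itertools.count()

q = PriorityQueue()
result_a = {"stream_key": "cam-1", "payload": "frame 12"}
result_b = {"stream_key": "cam-2", "payload": "frame 7"}

# Both items carry the same primary order (0). With 2-tuples (0, dict) this
# put/get cycle would fail with "TypeError: '<' not supported between
# instances of 'dict' and 'dict'"; the seq element prevents that.
q.put((0, next(seq_counter), result_a), block=False)
q.put((0, next(seq_counter), result_b), block=False)

while not q.empty():
    order, seq, result = q.get(block=False)
    print(order, seq, result["stream_key"])
```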
matrice/deploy/aggregator/synchronizer.py

```diff
@@ -76,7 +76,16 @@ class ResultsSynchronizer:
                 while True:  # Collect all available results from this queue
                     try:
                         priority_result = queue.get(block=False)
-
+                        # PriorityQueue items come in as (order, seq, result) or (order, result)
+                        if isinstance(priority_result, tuple):
+                            if len(priority_result) >= 3:
+                                result = priority_result[2]
+                            elif len(priority_result) == 2:
+                                result = priority_result[1]
+                            else:
+                                result = priority_result
+                        else:
+                            result = priority_result

                         stream_key = result.get("stream_key")
                         stream_group_key = result.get("stream_group_key")
```
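The synchronizer mirrors the ingestor's unwrapping so that both the new `(order, seq, result)` items and legacy `(order, result)` items keep working. A small standalone sketch of that branching (the helper name `unwrap_queue_item` is illustrative; the package inlines this logic):

```python
from typing import Any, Tuple, Union

def unwrap_queue_item(item: Union[Tuple[Any, ...], Any]) -> Any:
    """Return the result payload from a priority-queue item.

    Accepts (order, seq, result) from the new ingestor, (order, result)
    from older producers, or a bare result object.
    """
    if isinstance(item, tuple):
        if len(item) >= 3:
            return item[2]
        if len(item) == 2:
            return item[1]
    return item

# All three shapes resolve to the same payload.
assert unwrap_queue_item((0, 1, {"stream_key": "cam-1"})) == {"stream_key": "cam-1"}
assert unwrap_queue_item((0, {"stream_key": "cam-1"})) == {"stream_key": "cam-1"}
assert unwrap_queue_item({"stream_key": "cam-1"}) == {"stream_key": "cam-1"}
```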
matrice/deploy/server/stream_worker.py

```diff
@@ -52,13 +52,15 @@ class StreamWorker:
         if consumer_group_suffix:
             consumer_group_id += f"-{consumer_group_suffix}"

+        custom_request_service_id = self.inference_pipeline_id if (self.inference_pipeline_id and self.inference_pipeline_id != "000000000000000000000000") else deployment_id
+
         self.kafka_deployment = MatriceKafkaDeployment(
             session,
             deployment_id,
             "server",
             consumer_group_id,
             f"{deployment_instance_id}-{worker_id}",
-            custom_request_service_id=
+            custom_request_service_id=custom_request_service_id
         )

         # Worker state
```
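The new line implements a simple fallback: use `inference_pipeline_id` as the Kafka request service id unless it is missing or the 24-zero placeholder id, in which case fall back to `deployment_id`. A minimal sketch of that rule (function and constant names are illustrative; the worker computes this inline):

```python
# 24 zeros is treated as an unset/placeholder pipeline id in this change.
UNSET_PIPELINE_ID = "000000000000000000000000"

def resolve_request_service_id(inference_pipeline_id: str, deployment_id: str) -> str:
    """Prefer a real pipeline id; otherwise fall back to the deployment id."""
    if inference_pipeline_id and inference_pipeline_id != UNSET_PIPELINE_ID:
        return inference_pipeline_id
    return deployment_id

assert resolve_request_service_id("66b2f0c1a9e8d34c12345678", "dep-1") == "66b2f0c1a9e8d34c12345678"
assert resolve_request_service_id(UNSET_PIPELINE_ID, "dep-1") == "dep-1"
assert resolve_request_service_id("", "dep-1") == "dep-1"
```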
{matrice-1.0.99244.dist-info → matrice-1.0.99245.dist-info}/RECORD

```diff
@@ -94,10 +94,10 @@ matrice/data_processing/data_formats/yolo_detection.py,sha256=qUeZA7_8Of_QWGZlHh
 matrice/deploy/__init__.py,sha256=1IEknWS1AxZL4aGzCyASusGH_vMEPk1L6u8RnnZjI9w,1256
 matrice/deploy/aggregator/__init__.py,sha256=HFz-ufUMeSpSyRINcrx6NdmrcuVZtPfyIXxYu5UNLTc,508
 matrice/deploy/aggregator/aggregator.py,sha256=ob-I9ZaGmhXqdzbIHZL8DDtYCaT944ipmlhFzIz6peQ,11450
-matrice/deploy/aggregator/ingestor.py,sha256
+matrice/deploy/aggregator/ingestor.py,sha256=-ySEg1UD2ahPXw7Ifr_4QC5xRGb6co8jGD6PgtvCK3w,17141
 matrice/deploy/aggregator/pipeline.py,sha256=xOKDqb4Qre5Ek3Fr-03C-jxpZNXdQ2_Dk5EaOn4ssYE,33948
 matrice/deploy/aggregator/publisher.py,sha256=SRPnyindD_R_QNK5MJ_WZAeAaGtj79CZ1VNoltsQtXM,15587
-matrice/deploy/aggregator/synchronizer.py,sha256=
+matrice/deploy/aggregator/synchronizer.py,sha256=nSoFxlt4Ol6zbg4yCHufcNsif_Spq3jjKTvB5WY73Gk,20455
 matrice/deploy/client/__init__.py,sha256=d7yxlWNHYKOoAM9T_AlrSQw1_9cfLEop4zBE0QqrTVw,1330
 matrice/deploy/client/client.py,sha256=5itmvupufK48lPNb2__ZbZ9q3Q5ycfCDTW6ClDC7cM4,28531
 matrice/deploy/client/client_stream_utils.py,sha256=IcJf8Xn8VMrvxplKqSP5qNIwvu8--YSYaN4BCelk-iQ,40006
@@ -112,7 +112,7 @@ matrice/deploy/client/streaming_gateway/streaming_gateway_utils.py,sha256=_hPwoC
 matrice/deploy/client/streaming_gateway/streaming_results_handler.py,sha256=KTT-AADM0vw1QW1-mkYwLtZOAfC6KT68k3y6ot5dbOU,15700
 matrice/deploy/server/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 matrice/deploy/server/server.py,sha256=duki4KUU1tvW3Y7wkrlMRVvt7bAP2QqSIsrSogLxC4o,36799
-matrice/deploy/server/stream_worker.py,sha256=
+matrice/deploy/server/stream_worker.py,sha256=KDNlYep4Bg1xhHwDrwY9kfxsJfFtATydiYbzVDt58vs,20481
 matrice/deploy/server/inference/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
 matrice/deploy/server/inference/batch_manager.py,sha256=7fCHJfSeBqfYgKGZH9_sgUDAllymkGkFm0Oin_-SAYI,8322
 matrice/deploy/server/inference/cache_manager.py,sha256=ebQ1Y3vGNW3--TCPnWijDK3ix5HuZ_zH7wjs8iHKkbU,1610
@@ -242,8 +242,8 @@ matrice/deployment/camera_manager.py,sha256=e1Lc81RJP5wUWRdTgHO6tMWF9BkBdHOSVyx3
 matrice/deployment/deployment.py,sha256=HFt151eWq6iqIAMsQvurpV2WNxW6Cx_gIUVfnVy5SWE,48093
 matrice/deployment/inference_pipeline.py,sha256=6b4Mm3-qt-Zy0BeiJfFQdImOn3FzdNCY-7ET7Rp8PMk,37911
 matrice/deployment/streaming_gateway_manager.py,sha256=ifYGl3g25wyU39HwhPQyI2OgF3M6oIqKMWt8RXtMxY8,21401
-matrice-1.0.
-matrice-1.0.
-matrice-1.0.
-matrice-1.0.
-matrice-1.0.
+matrice-1.0.99245.dist-info/licenses/LICENSE.txt,sha256=2bm9uFabQZ3Ykb_SaSU_uUbAj2-htc6WJQmS_65qD00,1073
+matrice-1.0.99245.dist-info/METADATA,sha256=jebd73yO3PKQ4Am11nGzT9ieq9z63r2W3iNya74DHLA,14624
+matrice-1.0.99245.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+matrice-1.0.99245.dist-info/top_level.txt,sha256=P97js8ur6o5ClRqMH3Cjoab_NqbJ6sOQ3rJmVzKBvMc,8
+matrice-1.0.99245.dist-info/RECORD,,
```
WHEEL, licenses/LICENSE.txt and top_level.txt: files without changes.