matrice-inference 0.1.0 (matrice_inference-0.1.0-py3-none-manylinux_2_17_x86_64.whl)
This diff shows the content of publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only and reflects the changes between package versions.
Potentially problematic release.
This version of matrice-inference might be problematic.
- matrice_inference/deploy/aggregator/aggregator.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/aggregator.pyi +55 -0
- matrice_inference/deploy/aggregator/analytics.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/analytics.pyi +63 -0
- matrice_inference/deploy/aggregator/ingestor.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/ingestor.pyi +79 -0
- matrice_inference/deploy/aggregator/pipeline.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/pipeline.pyi +139 -0
- matrice_inference/deploy/aggregator/publisher.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/publisher.pyi +59 -0
- matrice_inference/deploy/aggregator/synchronizer.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/aggregator/synchronizer.pyi +58 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming.pyi +145 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/auto_streaming/auto_streaming_utils.pyi +126 -0
- matrice_inference/deploy/client/client.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/client.pyi +337 -0
- matrice_inference/deploy/client/client_stream_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/client_stream_utils.pyi +83 -0
- matrice_inference/deploy/client/client_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/client_utils.pyi +77 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway.pyi +120 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_gateway_utils.pyi +442 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_results_handler.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/client/streaming_gateway/streaming_results_handler.pyi +19 -0
- matrice_inference/deploy/optimize/cache_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/cache_manager.pyi +15 -0
- matrice_inference/deploy/optimize/frame_comparators.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/frame_comparators.pyi +203 -0
- matrice_inference/deploy/optimize/frame_difference.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/frame_difference.pyi +165 -0
- matrice_inference/deploy/optimize/transmission.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/optimize/transmission.pyi +97 -0
- matrice_inference/deploy/server/inference/batch_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/batch_manager.pyi +50 -0
- matrice_inference/deploy/server/inference/inference_interface.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/inference_interface.pyi +114 -0
- matrice_inference/deploy/server/inference/model_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/model_manager.pyi +80 -0
- matrice_inference/deploy/server/inference/triton_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/inference/triton_utils.pyi +115 -0
- matrice_inference/deploy/server/proxy/proxy_interface.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/proxy/proxy_interface.pyi +90 -0
- matrice_inference/deploy/server/proxy/proxy_utils.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/proxy/proxy_utils.pyi +113 -0
- matrice_inference/deploy/server/server.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/server.pyi +155 -0
- matrice_inference/deploy/server/stream/inference_worker.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/inference_worker.pyi +56 -0
- matrice_inference/deploy/server/stream/kafka_consumer_worker.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/kafka_consumer_worker.pyi +51 -0
- matrice_inference/deploy/server/stream/kafka_producer_worker.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/kafka_producer_worker.pyi +50 -0
- matrice_inference/deploy/server/stream/stream_debug_logger.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/stream_debug_logger.pyi +47 -0
- matrice_inference/deploy/server/stream/stream_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/stream_manager.pyi +69 -0
- matrice_inference/deploy/server/stream/video_buffer.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/server/stream/video_buffer.pyi +120 -0
- matrice_inference/deploy/stream/kafka_stream.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/stream/kafka_stream.pyi +444 -0
- matrice_inference/deploy/stream/redis_stream.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deploy/stream/redis_stream.pyi +447 -0
- matrice_inference/deployment/camera_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/camera_manager.pyi +669 -0
- matrice_inference/deployment/deployment.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/deployment.pyi +736 -0
- matrice_inference/deployment/inference_pipeline.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/inference_pipeline.pyi +527 -0
- matrice_inference/deployment/streaming_gateway_manager.cpython-312-x86_64-linux-gnu.so +0 -0
- matrice_inference/deployment/streaming_gateway_manager.pyi +275 -0
- matrice_inference/py.typed +0 -0
- matrice_inference-0.1.0.dist-info/METADATA +26 -0
- matrice_inference-0.1.0.dist-info/RECORD +80 -0
- matrice_inference-0.1.0.dist-info/WHEEL +5 -0
- matrice_inference-0.1.0.dist-info/licenses/LICENSE.txt +21 -0
- matrice_inference-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,50 @@
+"""Auto-generated stub for module: kafka_producer_worker."""
+from typing import Any, Dict, List, Optional
+
+from datetime import datetime, timezone
+from matrice_inference.deploy.stream.kafka_stream import MatriceKafkaDeployment
+import asyncio
+import logging
+
+# Classes
+class KafkaProducerWorker:
+    """
+    Kafka producer worker that consumes from output queue and produces to topics.
+    """
+
+    def __init__(self: Any, worker_id: str, session: Any, deployment_id: str, deployment_instance_id: str, output_queue: Any, app_name: str = '', app_version: str = '', produce_timeout: float = 10.0, inference_pipeline_id: str = '') -> None: ...
+    """
+    Initialize Kafka producer worker.
+
+    Args:
+        worker_id: Unique identifier for this worker
+        session: Session object for authentication and RPC
+        deployment_id: ID of the deployment
+        deployment_instance_id: ID of the deployment instance
+        output_queue: Queue to get result messages from
+        app_name: Application name for result formatting
+        app_version: Application version for result formatting
+        produce_timeout: Timeout for producing to Kafka
+        inference_pipeline_id: ID of the inference pipeline
+    """
+
+    def get_metrics(self: Any) -> Dict[str, Any]: ...
+    """
+    Get worker metrics.
+    """
+
+    def reset_metrics(self: Any) -> None: ...
+    """
+    Reset worker metrics.
+    """
+
+    async def start(self: Any) -> None: ...
+    """
+    Start the producer worker.
+    """
+
+    async def stop(self: Any) -> None: ...
+    """
+    Stop the producer worker.
+    """
+
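From the stub alone, KafkaProducerWorker appears to follow an async start/stop lifecycle around a shared output queue. A minimal sketch of how a caller might drive it follows; the session object, the placeholder IDs, and the message shape put on the queue are assumptions, not part of the stub.

import asyncio

from matrice_inference.deploy.server.stream.kafka_producer_worker import KafkaProducerWorker

async def main() -> None:
    output_queue: asyncio.Queue = asyncio.Queue()
    worker = KafkaProducerWorker(
        worker_id="producer-0",
        session=object(),                  # hypothetical session; typed as Any in the stub
        deployment_id="dep-123",           # placeholder IDs
        deployment_instance_id="inst-1",
        output_queue=output_queue,
        app_name="demo-app",
        app_version="0.1.0",
        produce_timeout=10.0,
    )
    task = asyncio.create_task(worker.start())  # start() is async, so run it as a task
    await output_queue.put({"result": "..."})   # assumed message shape
    await asyncio.sleep(1.0)
    await worker.stop()
    await asyncio.gather(task, return_exceptions=True)
    print(worker.get_metrics())

asyncio.run(main())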
Binary file
@@ -0,0 +1,47 @@
+"""Auto-generated stub for module: stream_debug_logger."""
+from typing import Any, Dict, List
+
+from datetime import datetime, timezone
+import logging
+import time
+
+# Classes
+class StreamDebugLogger:
+    """
+    Debug logging component for stream processing pipeline.
+    """
+
+    def __init__(self: Any, enabled: bool = False, log_interval: float = 30.0) -> None: ...
+    """
+    Initialize debug logger.
+
+    Args:
+        enabled: Whether debug logging is enabled
+        log_interval: Interval between debug log messages in seconds
+    """
+
+    def disable(self: Any) -> Any: ...
+    """
+    Disable debug logging.
+    """
+
+    def enable(self: Any) -> Any: ...
+    """
+    Enable debug logging.
+    """
+
+    def get_debug_summary(self: Any) -> Dict[str, Any]: ...
+    """
+    Get debug logging summary.
+    """
+
+    def log_pipeline_status(self: Any, stream_manager: Any) -> Any: ...
+    """
+    Log pipeline status if enabled and interval passed.
+    """
+
+    def should_log(self: Any) -> bool: ...
+    """
+    Check if we should log based on interval.
+    """
+
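StreamDebugLogger reads as a small interval-gated logger: should_log() presumably compares the current time against the last log plus log_interval. A sketch under that assumption:

from matrice_inference.deploy.server.stream.stream_debug_logger import StreamDebugLogger

logger = StreamDebugLogger(enabled=True, log_interval=5.0)

# log_pipeline_status() takes the stream manager whose queues and workers
# it reports on; the stub types it as Any, so None stands in here.
if logger.should_log():
    logger.log_pipeline_status(stream_manager=None)

print(logger.get_debug_summary())
logger.disable()  # silence further output without discarding the logger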
Binary file
@@ -0,0 +1,69 @@
+"""Auto-generated stub for module: stream_manager."""
+from typing import Any, Dict, Optional
+
+from matrice_inference.deploy.server.inference.inference_interface import InferenceInterface
+from matrice_inference.deploy.server.stream.inference_worker import InferenceWorker
+from matrice_inference.deploy.server.stream.kafka_consumer_worker import KafkaConsumerWorker
+from matrice_inference.deploy.server.stream.kafka_producer_worker import KafkaProducerWorker
+from matrice_inference.deploy.server.stream.stream_debug_logger import StreamDebugLogger
+import asyncio
+import logging
+import uuid
+
+# Classes
+class StreamManager:
+    """
+    Stream manager with asyncio queues and integrated debug logging.
+    """
+
+    def __init__(self: Any, session: Any, deployment_id: str, deployment_instance_id: str, inference_interface: Any, num_consumers: int = 1, num_inference_workers: int = 1, num_producers: int = 1, app_name: str = '', app_version: str = '', inference_pipeline_id: str = '', debug_logging_enabled: bool = False, debug_log_interval: float = 30.0, input_queue_maxsize: int = 0, output_queue_maxsize: int = 0) -> None: ...
+    """
+    Initialize stream manager.
+
+    Args:
+        session: Session object for authentication and RPC
+        deployment_id: ID of the deployment
+        deployment_instance_id: ID of the deployment instance
+        inference_interface: Inference interface to use for inference
+        num_consumers: Number of consumer workers
+        num_inference_workers: Number of inference workers
+        num_producers: Number of producer workers
+        app_name: Application name for result formatting
+        app_version: Application version for result formatting
+        inference_pipeline_id: ID of the inference pipeline
+        debug_logging_enabled: Whether to enable debug logging
+        debug_log_interval: Interval for debug logging in seconds
+        input_queue_maxsize: Maximum size for input queue (0 = unlimited)
+        output_queue_maxsize: Maximum size for output queue (0 = unlimited)
+    """
+
+    def disable_debug_logging(self: Any) -> Any: ...
+    """
+    Disable debug logging.
+    """
+
+    def enable_debug_logging(self: Any, log_interval: Optional[float] = None) -> Any: ...
+    """
+    Enable debug logging.
+    """
+
+    def get_debug_summary(self: Any) -> Dict[str, Any]: ...
+    """
+    Get debug logging summary.
+    """
+
+    def get_metrics(self: Any) -> Dict[str, Any]: ...
+    """
+    Get comprehensive metrics.
+    """
+
+    async def start(self: Any) -> None: ...
+    """
+    Start the stream manager and all workers.
+    """
+
+    async def stop(self: Any) -> None: ...
+    """
+    Stop the stream manager and all workers.
+    """
+
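StreamManager wires Kafka consumer, inference, and producer workers around asyncio queues. A hedged lifecycle sketch, with placeholder IDs and a caller-supplied session and inference interface:

import asyncio

from matrice_inference.deploy.server.stream.stream_manager import StreamManager

async def run_pipeline(session, inference_interface) -> None:
    manager = StreamManager(
        session=session,
        deployment_id="dep-123",            # placeholder IDs
        deployment_instance_id="inst-1",
        inference_interface=inference_interface,
        num_consumers=2,
        num_inference_workers=2,
        num_producers=1,
        debug_logging_enabled=True,
        input_queue_maxsize=1000,           # 0 means unbounded per the docstring;
        output_queue_maxsize=1000,          # bounding the queues applies backpressure
    )
    await manager.start()
    try:
        await asyncio.sleep(60.0)           # serve for a while
        print(manager.get_metrics())
    finally:
        await manager.stop()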
Binary file
@@ -0,0 +1,120 @@
+"""Auto-generated stub for module: video_buffer."""
+from typing import Any, Dict, List, Optional
+
+from collections import defaultdict, deque
+from datetime import datetime, timezone
+import asyncio
+import base64
+import cv2
+import logging
+import numpy as np
+import os
+import tempfile
+
+# Functions
+def base64_frames_to_video_bytes_cv2(base64_frames: Any, fps: Any = 10, output_format: Any = 'mp4') -> Any: ...
+"""
+Convert base64-encoded JPEG frames to a video using OpenCV,
+and return the video bytes by writing to a temp file.
+"""
+
+# Classes
+class FrameBuffer:
+    """
+    Buffer for collecting frames into video chunks.
+    """
+
+    def __init__(self: Any, stream_key: str, buffer_config: Dict[str, Any]) -> None: ...
+    """
+    Initialize frame buffer for a specific stream.
+
+    Args:
+        stream_key: Unique identifier for the stream
+        buffer_config: Configuration for buffering (fps, duration, etc.)
+    """
+
+    def add_frame(self: Any, base64_frame: str, metadata: Dict[str, Any]) -> bool: ...
+    """
+    Add a frame to the buffer.
+
+    Args:
+        base64_frame: Base64 encoded frame data
+        metadata: Frame metadata
+
+    Returns:
+        True if buffer is ready for processing, False otherwise
+    """
+
+    def clear(self: Any) -> Any: ...
+    """
+    Clear the buffer.
+    """
+
+    def create_video_chunk(self: Any) -> Optional[Dict[str, Any]]: ...
+    """
+    Create a video chunk from buffered frames.
+
+    Returns:
+        Dictionary containing video data and metadata, or None if failed
+    """
+
+    def is_expired(self: Any, max_idle_time: float = 30.0) -> bool: ...
+    """
+    Check if buffer has been idle too long.
+    """
+
+    def is_ready(self: Any) -> bool: ...
+    """
+    Check if buffer is ready for processing.
+    """
+
+class VideoBufferManager:
+    """
+    Manages multiple frame buffers for different streams.
+    """
+
+    def __init__(self: Any, default_fps: int = 10, default_chunk_duration: float = 5.0, default_timeout: float = 10.0, max_idle_time: float = 30.0, cleanup_interval: float = 60.0) -> None: ...
+    """
+    Initialize video buffer manager.
+
+    Args:
+        default_fps: Default FPS for video chunks
+        default_chunk_duration: Default chunk duration in seconds
+        default_timeout: Default timeout for buffering in seconds
+        max_idle_time: Maximum idle time before buffer cleanup
+        cleanup_interval: Interval for cleanup tasks
+    """
+
+    async def add_frame(self: Any, stream_key: str, base64_frame: str, metadata: Dict[str, Any]) -> Optional[Dict[str, Any]]: ...
+    """
+    Add a frame to the appropriate buffer.
+
+    Args:
+        stream_key: Stream identifier
+        base64_frame: Base64 encoded frame data
+        metadata: Frame metadata
+
+    Returns:
+        Video chunk data if buffer is ready, None otherwise
+    """
+
+    def get_metrics(self: Any) -> Dict[str, Any]: ...
+    """
+    Get buffer manager metrics.
+    """
+
+    def reset_metrics(self: Any) -> Any: ...
+    """
+    Reset metrics.
+    """
+
+    async def start(self: Any) -> Any: ...
+    """
+    Start the buffer manager.
+    """
+
+    async def stop(self: Any) -> Any: ...
+    """
+    Stop the buffer manager.
+    """
+
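The video_buffer module batches per-stream base64 JPEG frames into fixed-duration video chunks. Below is a sketch of feeding VideoBufferManager synthetic frames; the metadata shape and the keys of the returned chunk dict are assumptions, not documented by the stub.

import asyncio
import base64

import cv2
import numpy as np

from matrice_inference.deploy.server.stream.video_buffer import VideoBufferManager

def fake_jpeg_frame() -> str:
    # Encode a black 64x64 image as a base64 JPEG string, the input format
    # the buffer's docstrings describe.
    ok, buf = cv2.imencode(".jpg", np.zeros((64, 64, 3), dtype=np.uint8))
    return base64.b64encode(buf.tobytes()).decode("ascii")

async def main() -> None:
    manager = VideoBufferManager(default_fps=10, default_chunk_duration=2.0)
    await manager.start()
    try:
        for i in range(30):
            # add_frame() should hand back a chunk once roughly
            # default_fps * default_chunk_duration frames have accumulated.
            chunk = await manager.add_frame(
                stream_key="camera-1",
                base64_frame=fake_jpeg_frame(),
                metadata={"frame_index": i},   # assumed metadata shape
            )
            if chunk is not None:
                print("chunk ready:", sorted(chunk.keys()))
    finally:
        await manager.stop()
        print(manager.get_metrics())

asyncio.run(main())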