aissemble-inference-core 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- aissemble_inference_core/__init__.py +19 -0
- aissemble_inference_core/client/__init__.py +23 -0
- aissemble_inference_core/client/builder/__init__.py +36 -0
- aissemble_inference_core/client/builder/inference_builder.py +178 -0
- aissemble_inference_core/client/builder/object_detection_builder.py +190 -0
- aissemble_inference_core/client/builder/raw_inference_builder.py +95 -0
- aissemble_inference_core/client/builder/summarization_builder.py +213 -0
- aissemble_inference_core/client/inference_client.py +158 -0
- aissemble_inference_core/client/oip_adapter.py +211 -0
- aissemble_inference_core/client/predictor.py +75 -0
- aissemble_inference_core/client/registry.py +201 -0
- aissemble_inference_core/client/results/__init__.py +29 -0
- aissemble_inference_core/client/results/object_detection_result.py +155 -0
- aissemble_inference_core/client/results/summarization_result.py +78 -0
- aissemble_inference_core/client/translator.py +57 -0
- aissemble_inference_core/client/translators/__init__.py +34 -0
- aissemble_inference_core/client/translators/_image_utils.py +75 -0
- aissemble_inference_core/client/translators/_tensor_utils.py +89 -0
- aissemble_inference_core/client/translators/object_detection_translator.py +212 -0
- aissemble_inference_core/client/translators/summarization_translator.py +147 -0
- aissemble_inference_core/client/translators/tensorflow_object_detection_translator.py +231 -0
- aissemble_inference_core-1.5.0.dist-info/METADATA +71 -0
- aissemble_inference_core-1.5.0.dist-info/RECORD +26 -0
- aissemble_inference_core-1.5.0.dist-info/WHEEL +4 -0
- aissemble_inference_core-1.5.0.dist-info/entry_points.txt +5 -0
- aissemble_inference_core-1.5.0.dist-info/licenses/LICENSE.txt +201 -0
|
@@ -0,0 +1,19 @@
|
|
|
1
|
+
###
|
|
2
|
+
# #%L
|
|
3
|
+
# aiSSEMBLE::Open Inference Protocol::Core
|
|
4
|
+
# %%
|
|
5
|
+
# Copyright (C) 2024 Booz Allen Hamilton Inc.
|
|
6
|
+
# %%
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
# #L%
|
|
19
|
+
###
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
###
|
|
2
|
+
# #%L
|
|
3
|
+
# aiSSEMBLE::Open Inference Protocol::Core
|
|
4
|
+
# %%
|
|
5
|
+
# Copyright (C) 2024 Booz Allen Hamilton Inc.
|
|
6
|
+
# %%
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
# #L%
|
|
19
|
+
###
|
|
20
|
+
from aissemble_inference_core.client.inference_client import InferenceClient
|
|
21
|
+
from aissemble_inference_core.client.registry import ModuleRegistry
|
|
22
|
+
|
|
23
|
+
__all__ = ["InferenceClient", "ModuleRegistry"]
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
###
|
|
2
|
+
# #%L
|
|
3
|
+
# aiSSEMBLE::Open Inference Protocol::Core
|
|
4
|
+
# %%
|
|
5
|
+
# Copyright (C) 2024 Booz Allen Hamilton Inc.
|
|
6
|
+
# %%
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
# #L%
|
|
19
|
+
###
|
|
20
|
+
from aissemble_inference_core.client.builder.inference_builder import InferenceBuilder
|
|
21
|
+
from aissemble_inference_core.client.builder.object_detection_builder import (
|
|
22
|
+
ObjectDetectionBuilder,
|
|
23
|
+
)
|
|
24
|
+
from aissemble_inference_core.client.builder.raw_inference_builder import (
|
|
25
|
+
RawInferenceBuilder,
|
|
26
|
+
)
|
|
27
|
+
from aissemble_inference_core.client.builder.summarization_builder import (
|
|
28
|
+
SummarizationBuilder,
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
__all__ = [
|
|
32
|
+
"InferenceBuilder",
|
|
33
|
+
"ObjectDetectionBuilder",
|
|
34
|
+
"RawInferenceBuilder",
|
|
35
|
+
"SummarizationBuilder",
|
|
36
|
+
]
|
|
@@ -0,0 +1,178 @@
|
|
|
1
|
+
###
|
|
2
|
+
# #%L
|
|
3
|
+
# aiSSEMBLE::Open Inference Protocol::Core
|
|
4
|
+
# %%
|
|
5
|
+
# Copyright (C) 2024 Booz Allen Hamilton Inc.
|
|
6
|
+
# %%
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
# #L%
|
|
19
|
+
###
|
|
20
|
+
from abc import ABC, abstractmethod
|
|
21
|
+
from typing import Dict, Generator, Any
|
|
22
|
+
from collections.abc import Iterator
|
|
23
|
+
|
|
24
|
+
from aissemble_inference_core.client.oip_adapter import OipAdapter
|
|
25
|
+
from aissemble_inference_core.client.predictor import Predictor
|
|
26
|
+
from aissemble_inference_core.client.translator import Translator
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
class InferenceBuilder(ABC, Iterator[Any]):
    """Common base for task-oriented inference builders.

    Instead of exposing one generic ``predict`` entry point, each inference
    task (object detection, text summarization, translation, ...) gets its
    own builder subclass with a fluent API shaped for that task. This base
    class owns the configuration state those subclasses share, along with
    the fluent setters that populate it.

    What this layout buys us:
    - Task-specific signatures that catch misuse at authoring time
    - One home for shared state (model name, adapter, parameters, etc.)
    - Uniform streaming and iteration hooks across all tasks
    - A structure that mirrors the HuggingFace notion of "tasks"

    Subclasses implement ``build_predictor`` to turn the accumulated
    configuration into a ready-to-use :class:`Predictor`.
    """

    def __init__(self) -> None:
        # All state starts empty; the with_* setters fill it in.
        self._model_name: str | None = None
        self._oip_adapter: OipAdapter | None = None
        self._translator: Translator | None = None
        self._parameters: Dict[str, Any] = {}
        self._streaming: bool = False

    # -------------------------------------------------------------------------
    # Read-only views over the shared state (for subclasses)
    # -------------------------------------------------------------------------

    @property
    def model_name(self) -> str | None:
        """Name or identifier of the model targeted for inference."""
        return self._model_name

    @property
    def oip_adapter(self) -> OipAdapter | None:
        """Adapter that carries requests to the inference service."""
        return self._oip_adapter

    @property
    def translator(self) -> Translator | None:
        """Translator converting data between client and service formats."""
        return self._translator

    @property
    def parameters(self) -> Dict[str, Any]:
        """Model- or task-specific inference parameters."""
        return self._parameters

    @property
    def streaming(self) -> bool:
        """True once streaming mode has been requested via ``stream()``."""
        return self._streaming

    # -------------------------------------------------------------------------
    # Fluent setters (chainable; shared by every subclass)
    # -------------------------------------------------------------------------

    def with_model(self, model_name: str) -> "InferenceBuilder":
        """Record the model name/identifier.

        Args:
            model_name: The identifier of the model to use.

        Returns:
            Self, for method chaining.
        """
        self._model_name = model_name
        return self

    def with_adapter(self, adapter: OipAdapter) -> "InferenceBuilder":
        """Record the OIP adapter instance.

        Args:
            adapter: The adapter handling protocol communication.

        Returns:
            Self, for method chaining.
        """
        self._oip_adapter = adapter
        return self

    def with_translator(self, translator: Translator) -> "InferenceBuilder":
        """Record the data translator instance.

        Args:
            translator: The translator for request/response serialization.

        Returns:
            Self, for method chaining.
        """
        self._translator = translator
        return self

    def with_parameters(self, **parameters: Any) -> "InferenceBuilder":
        """Merge keyword arguments into the inference parameters.

        Args:
            **parameters: Arbitrary keyword arguments representing inference
                parameters; keys already present are overwritten.

        Returns:
            Self, for method chaining.
        """
        for name, value in parameters.items():
            self._parameters[name] = value
        return self

    # -------------------------------------------------------------------------
    # Contract every task-specific builder must satisfy
    # -------------------------------------------------------------------------

    @abstractmethod
    def build_predictor(self) -> Predictor:
        """Assemble and return a fully configured :class:`Predictor`.

        Concrete subclasses combine the configured model name, adapter,
        translator, parameters, etc. into a predictor for their task.

        Returns:
            A ready-to-use predictor for the specific task.
        """
        raise NotImplementedError

    def stream(self) -> "InferenceBuilder":
        """Turn on streaming mode for subsequent inference calls.

        Returns:
            Self, for method chaining.
        """
        self._streaming = True
        return self

    # -------------------------------------------------------------------------
    # Iterator protocol hook for streaming responses
    # -------------------------------------------------------------------------

    def __iter__(self) -> Generator[Any, None, None]:
        """Yield incremental results when streaming is enabled.

        Builders for tasks that support streaming override this so that
        ``iter(builder)`` (or a for-loop over the builder) produces partial
        results as the service emits them. The base implementation always
        raises, since streaming behavior is task-specific.

        Yields:
            Task-specific streaming chunks (e.g., tokens, bounding boxes, etc.).
        """
        raise NotImplementedError(
            "Streaming iteration is not implemented for this task"
        )
|
|
@@ -0,0 +1,190 @@
|
|
|
1
|
+
###
|
|
2
|
+
# #%L
|
|
3
|
+
# aiSSEMBLE::Open Inference Protocol::Core
|
|
4
|
+
# %%
|
|
5
|
+
# Copyright (C) 2024 Booz Allen Hamilton Inc.
|
|
6
|
+
# %%
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
# #L%
|
|
19
|
+
###
|
|
20
|
+
from typing import Any, Generator
|
|
21
|
+
|
|
22
|
+
from aissemble_inference_core.client.builder.inference_builder import InferenceBuilder
|
|
23
|
+
from aissemble_inference_core.client.predictor import Predictor
|
|
24
|
+
from aissemble_inference_core.client.results import ObjectDetectionResult
|
|
25
|
+
from aissemble_inference_core.client.translators import DefaultObjectDetectionTranslator
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
class ObjectDetectionPredictor(Predictor[Any, ObjectDetectionResult]):
    """Predictor that performs the full object-detection round trip."""

    def __init__(
        self,
        adapter: Any,
        translator: DefaultObjectDetectionTranslator,
    ):
        """Wire up the predictor with its collaborators.

        Args:
            adapter: OIP adapter for communication
            translator: Translator for preprocessing/postprocessing
        """
        self.adapter = adapter
        self.translator = translator

    def predict(self, input_data: Any) -> ObjectDetectionResult:  # noqa: A003
        """Run object detection against the supplied image.

        Args:
            input_data: Image in various formats (PIL, numpy, path, bytes)

        Returns:
            ObjectDetectionResult with detected objects
        """
        # Translate to protocol format, call the service, translate back.
        oip_request = self.translator.preprocess(input_data)
        oip_response = self.adapter.infer(oip_request)
        return self.translator.postprocess(oip_response)
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
class ObjectDetectionBuilder(InferenceBuilder):
    """Task-specific builder for object detection inference.

    Provides a fluent, natural API for object detection:
        builder.image(my_image).confidence(0.5).run()

    Example:
        client = InferenceClient(adapter, endpoint)
        result = (client.detect_object()
            .with_model("yolov8")
            .image("photo.jpg")
            .confidence(0.6)
            .run())
    """

    def __init__(self):
        """Initialize the builder with no image, no confidence floor, and no
        label filter configured."""
        super().__init__()
        self._image_input: Any = None
        self._confidence_threshold: float = 0.0
        self._filter_labels: list[str] | None = None

    def image(self, image: Any) -> "ObjectDetectionBuilder":
        """Set the input image for detection.

        Args:
            image: Image in various formats (PIL Image, numpy array, file path, bytes)

        Returns:
            Self for method chaining
        """
        self._image_input = image
        return self

    def confidence(self, threshold: float) -> "ObjectDetectionBuilder":
        """Set minimum confidence threshold for detections.

        Args:
            threshold: Minimum confidence score (0-1)

        Returns:
            Self for method chaining

        Raises:
            ValueError: If the threshold falls outside the [0, 1] range
        """
        if not 0 <= threshold <= 1:
            raise ValueError("Confidence threshold must be between 0 and 1")
        self._confidence_threshold = threshold
        return self

    def labels(self, labels: list[str]) -> "ObjectDetectionBuilder":
        """Filter detections to only include specified labels.

        Args:
            labels: List of labels to include

        Returns:
            Self for method chaining
        """
        # Defensive copy: storing the caller's list by reference would let
        # later external mutation silently change the filtering run() applies.
        self._filter_labels = list(labels)
        return self

    def run(self) -> ObjectDetectionResult:
        """Execute the object detection inference.

        Returns:
            ObjectDetectionResult with detected objects, post-filtered by the
            configured confidence threshold and label list (when set)

        Raises:
            ValueError: If required inputs are missing
        """
        if self._image_input is None:
            raise ValueError("Image input is required. Call .image() first.")

        predictor = self.build_predictor()
        result = predictor.predict(self._image_input)

        # Client-side post-filtering; a threshold of 0 keeps every detection.
        if self._confidence_threshold > 0:
            result = result.filter_by_confidence(self._confidence_threshold)

        if self._filter_labels:
            result = result.filter_by_label(self._filter_labels)

        return result

    def build_predictor(self) -> Predictor[Any, ObjectDetectionResult]:
        """Build the predictor for object detection.

        Returns:
            ObjectDetectionPredictor instance

        Raises:
            ValueError: If the adapter has not been set
        """
        if self.oip_adapter is None:
            raise ValueError("OipAdapter is required. Call .with_adapter() first.")

        # NOTE(review): a caller-supplied translator that is not a
        # DefaultObjectDetectionTranslator is silently replaced with the
        # default here -- confirm this narrowing is intended.
        translator = (
            self.translator
            if isinstance(self.translator, DefaultObjectDetectionTranslator)
            else DefaultObjectDetectionTranslator()
        )

        return ObjectDetectionPredictor(
            adapter=self.oip_adapter,
            translator=translator,
        )

    def __iter__(self) -> Generator[ObjectDetectionResult, None, None]:
        """Streaming iteration for object detection.

        Not typically used for object detection but provided for consistency.

        Yields:
            ObjectDetectionResult instances

        Raises:
            NotImplementedError: Streaming is not supported for object detection
        """
        raise NotImplementedError(
            "Streaming is not supported for object detection tasks"
        )

    def __next__(self) -> ObjectDetectionResult:
        """Return the next streaming result.

        Not typically used for object detection but provided for Iterator protocol.

        Returns:
            ObjectDetectionResult instance

        Raises:
            NotImplementedError: Streaming is not supported for object detection
        """
        raise NotImplementedError(
            "Streaming is not supported for object detection tasks"
        )
|
|
@@ -0,0 +1,95 @@
|
|
|
1
|
+
###
|
|
2
|
+
# #%L
|
|
3
|
+
# aiSSEMBLE::Open Inference Protocol::Core
|
|
4
|
+
# %%
|
|
5
|
+
# Copyright (C) 2024 Booz Allen Hamilton Inc.
|
|
6
|
+
# %%
|
|
7
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
8
|
+
# you may not use this file except in compliance with the License.
|
|
9
|
+
# You may obtain a copy of the License at
|
|
10
|
+
#
|
|
11
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
12
|
+
#
|
|
13
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
14
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
15
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
16
|
+
# See the License for the specific language governing permissions and
|
|
17
|
+
# limitations under the License.
|
|
18
|
+
# #L%
|
|
19
|
+
###
|
|
20
|
+
from typing import Dict, Any
|
|
21
|
+
|
|
22
|
+
|
|
23
|
+
class RawInferenceBuilder:
    """Bare-bones, non-fluent escape hatch for issuing raw inference requests.

    Compared to the higher-level task-specific builders, this class is
    deliberately low-level and unergonomic. It exists for the rare cases
    where no pre-defined abstraction fits (custom model signatures,
    experimental tasks). Reaching for it should be read as a signal that a
    proper task-level abstraction ought to be contributed upstream or
    maintained within the project.

    Chaining is intentionally unsupported: each configuration method
    returns ``None`` rather than ``self``, so this class cannot be mistaken
    for one of the ergonomic fluent builders.
    """

    def __init__(self) -> None:
        """Create a builder with no inputs and no parameters configured."""
        self._inputs: Dict[str, Any] = {}
        self._parameters: Dict[str, Any] = {}

    def inputs(self, tensors: Dict[str, Any]) -> None:
        """Replace the raw input tensors for the inference request.

        Args:
            tensors: A mapping of input names to tensor objects (or compatible
                representations such as NumPy arrays, PyTorch tensors,
                TensorFlow tensors, etc.). Exact type requirements depend on
                the backend implementation.

        Returns:
            None. This method intentionally does not support fluent chaining.
        """
        if not isinstance(tensors, dict):
            raise TypeError(
                "tensors must be a dictionary mapping input names to tensor objects"
            )
        # Shallow copy so mutating the caller's mapping afterwards cannot
        # alter this request's inputs.
        self._inputs = tensors.copy()

    def parameters(self, parameters: Dict[str, Any]) -> None:
        """Replace the optional inference parameters (e.g., temperature, top_k).

        Args:
            parameters: A mapping of parameter names to their values; supported
                keys and their meaning are model-specific.

        Returns:
            None. This method intentionally does not support fluent chaining.
        """
        if not isinstance(parameters, dict):
            raise TypeError("parameters must be a dictionary")
        self._parameters = parameters.copy()

    def run(self) -> Dict[str, Any]:
        """Execute the request using the configured raw inputs and parameters.

        Delegates to the underlying client/runner implementation (assumed to
        be available in the broader library context).

        Returns:
            The raw model outputs, typically a dictionary mapping output names
            to tensor-like objects.

        Raises:
            RuntimeError: If the request cannot be executed (e.g., missing
                client context, network error, etc.).
            NotImplementedError: Placeholder until the actual execution logic
                is wired in by the library.
        """
        # Placeholder implementation – actual execution will be provided by integration with the client
        raise NotImplementedError(
            "RawInferenceBuilder.run() is not yet implemented. "
            "This method should be overridden or wired to the inference client in the final library."
        )

    def _get_request_payload(self) -> Dict[str, Any]:
        """Internal helper exposing the current payload (tests / client integration)."""
        return {"inputs": self._inputs, "parameters": self._parameters}
|