clarifai 10.8.3__py3-none-any.whl → 10.8.5__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- clarifai/__init__.py +1 -1
- clarifai/client/dataset.py +9 -3
- clarifai/constants/dataset.py +1 -1
- clarifai/datasets/upload/base.py +6 -3
- clarifai/datasets/upload/features.py +10 -0
- clarifai/datasets/upload/image.py +22 -13
- clarifai/datasets/upload/multimodal.py +70 -0
- clarifai/datasets/upload/text.py +8 -5
- clarifai/runners/utils/data_handler.py +31 -44
- clarifai/runners/utils/loader.py +6 -5
- clarifai/utils/misc.py +6 -0
- {clarifai-10.8.3.dist-info → clarifai-10.8.5.dist-info}/METADATA +2 -1
- {clarifai-10.8.3.dist-info → clarifai-10.8.5.dist-info}/RECORD +17 -60
- clarifai/models/model_serving/README.md +0 -158
- clarifai/models/model_serving/__init__.py +0 -14
- clarifai/models/model_serving/cli/__init__.py +0 -12
- clarifai/models/model_serving/cli/_utils.py +0 -53
- clarifai/models/model_serving/cli/base.py +0 -14
- clarifai/models/model_serving/cli/build.py +0 -79
- clarifai/models/model_serving/cli/clarifai_clis.py +0 -33
- clarifai/models/model_serving/cli/create.py +0 -171
- clarifai/models/model_serving/cli/example_cli.py +0 -34
- clarifai/models/model_serving/cli/login.py +0 -26
- clarifai/models/model_serving/cli/upload.py +0 -183
- clarifai/models/model_serving/constants.py +0 -21
- clarifai/models/model_serving/docs/cli.md +0 -161
- clarifai/models/model_serving/docs/concepts.md +0 -229
- clarifai/models/model_serving/docs/dependencies.md +0 -11
- clarifai/models/model_serving/docs/inference_parameters.md +0 -139
- clarifai/models/model_serving/docs/model_types.md +0 -19
- clarifai/models/model_serving/model_config/__init__.py +0 -16
- clarifai/models/model_serving/model_config/base.py +0 -369
- clarifai/models/model_serving/model_config/config.py +0 -312
- clarifai/models/model_serving/model_config/inference_parameter.py +0 -129
- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -25
- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -20
- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -22
- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -32
- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -19
- clarifai/models/model_serving/model_config/output.py +0 -133
- clarifai/models/model_serving/model_config/triton/__init__.py +0 -14
- clarifai/models/model_serving/model_config/triton/serializer.py +0 -136
- clarifai/models/model_serving/model_config/triton/triton_config.py +0 -182
- clarifai/models/model_serving/model_config/triton/wrappers.py +0 -281
- clarifai/models/model_serving/repo_build/__init__.py +0 -14
- clarifai/models/model_serving/repo_build/build.py +0 -198
- clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -2
- clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -169
- clarifai/models/model_serving/repo_build/static_files/inference.py +0 -26
- clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -25
- clarifai/models/model_serving/repo_build/static_files/test.py +0 -40
- clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -75
- clarifai/models/model_serving/utils.py +0 -31
- {clarifai-10.8.3.dist-info → clarifai-10.8.5.dist-info}/LICENSE +0 -0
- {clarifai-10.8.3.dist-info → clarifai-10.8.5.dist-info}/WHEEL +0 -0
- {clarifai-10.8.3.dist-info → clarifai-10.8.5.dist-info}/entry_points.txt +0 -0
- {clarifai-10.8.3.dist-info → clarifai-10.8.5.dist-info}/top_level.txt +0 -0
--- clarifai/models/model_serving/model_config/config.py
+++ /dev/null
@@ -1,312 +0,0 @@
-import logging
-from copy import deepcopy
-from dataclasses import asdict, dataclass, field
-from typing import Any, List
-
-import yaml
-
-from .inference_parameter import InferParam
-from .output import *  # noqa: F403
-from .triton import DType  # noqa
-from .triton import Device, DynamicBatching, InputConfig, OutputConfig, TritonModelConfig
-
-logger = logging.getLogger(__name__)
-
-__all__ = ["ModelTypes", "ModelConfigClass", "MODEL_TYPES", "get_model_config", "load_user_config"]
-
-
-# Clarifai model types
-@dataclass
-class ModelTypes:
-  """ All supported Clarifai model type names
-  """
-  visual_detector: str = "visual-detector"
-  visual_classifier: str = "visual-classifier"
-  text_classifier: str = "text-classifier"
-  text_to_text: str = "text-to-text"
-  text_embedder: str = "text-embedder"
-  text_to_image: str = "text-to-image"
-  visual_embedder: str = "visual-embedder"
-  visual_segmenter: str = "visual-segmenter"
-  multimodal_embedder: str = "multimodal-embedder"
-
-  def __post_init__(self):
-    self.all = list(asdict(self).values())
-
-  @property
-  def image_input_models(self):
-    """ Return list of model types having image as input or one of inputs"""
-    _visual = [each for each in self.all if each.startswith("visual")]
-
-    return _visual + [self.multimodal_embedder]
-
-
-@dataclass
-class FieldMapsConfig:
-  input_fields_map: dict = field(default_factory=dict)
-  output_fields_map: dict = field(default_factory=dict)
-
-
-@dataclass
-class ServingBackendConfig:
-  """
-  """
-  triton: TritonModelConfig = None
-
-
-@dataclass
-class ClarifaiModelConfig:
-  """Clarifai necessary configs for building/uploading/creation
-
-  Args:
-    field_maps (FieldMapsConfig): Field maps config
-    output_type (dataclass): model output type
-    labels (List[str]): list of concept names
-    inference_parameters (List[InferParam]): list of inference parameters
-    clarifai_model_id (str): Clarifai model id on the platform
-    type (str): one of `MODEL_TYPES`
-    clarifai_user_app_id (str): User ID and App ID separated by '/', e.g., <user_id>/<app_id>
-    description (str): model description
-
-  """
-  field_maps: FieldMapsConfig = None
-  output_type: str = None
-  labels: List[str] = field(default_factory=list)
-  inference_parameters: List[InferParam] = field(default_factory=list)
-  clarifai_model_id: str = ""
-  type: str = ""
-  clarifai_user_app_id: str = ""
-  description: str = ""
-
-  def _checking(self, var_name: str, var_value: Any):
-    if var_name == "type":
-      _model_types = MODEL_TYPES + [""]
-      assert self.type in _model_types
-    elif var_name == "clarifai_model_id" and var_value:
-      # TODO: Must ensure name is valid
-      pass
-    elif var_name == "clarifai_user_app_id" and var_value:
-      _user_app = var_value.split("/")
-      assert len(_user_app) == 2, ValueError(
-          f"id must be combination of user_id and app_id separated by `/`, e.g. <user_id>/<app_id>. Got {var_value}"
-      )
-    elif var_name == "labels":
-      if var_value:
-        assert isinstance(var_value, tuple) or isinstance(
-            var_value, list), f"labels must be tuple or list, got {type(var_value)}"
-        var_value = [str(each) for each in var_value]
-
-    return var_value
-
-  def __setattr__(self, __name: str, __value: Any) -> None:
-    __value = self._checking(__name, __value)
-
-    super().__setattr__(__name, __value)
-
-
-@dataclass
-class ModelConfigClass():
-  """All config of model
-  Args:
-    clarifai_model (ClarifaiModelConfig): Clarifai model config
-    serving_backend (ServingBackendConfig): Custom serving backend config. Only support triton for now
-  """
-  clarifai_model: ClarifaiModelConfig
-  serving_backend: ServingBackendConfig
-
-  def make_triton_model_config(
-      self,
-      model_name: str,
-      model_version: str,
-      image_shape: List = None,
-      instance_group: Device = Device(),
-      dynamic_batching: DynamicBatching = DynamicBatching(),
-      max_batch_size: int = 1,
-      backend: str = "python",
-  ) -> TritonModelConfig:
-
-    return TritonModelConfig(
-        model_name=model_name,
-        model_version=model_version,
-        image_shape=image_shape,
-        instance_group=instance_group,
-        dynamic_batching=dynamic_batching,
-        max_batch_size=max_batch_size,
-        backend=backend,
-        input=self.serving_backend.triton.input,
-        output=self.serving_backend.triton.output)
-
-  def dump_to_user_config(self):
-    data = asdict(self)
-    _self = deepcopy(self)
-    # dump backend
-    if hasattr(_self.serving_backend, "triton"):
-      dict_triton_config = asdict(_self.serving_backend.triton)
-      for k, v in dict_triton_config.items():
-        if (k == "max_batch_size" and v > 1) \
-            or (k == "image_shape" and v != [-1, -1] and self.clarifai_model.type in ModelTypes().image_input_models):
-          continue
-        else:
-          data["serving_backend"]["triton"].pop(k, None)
-
-    if not data["serving_backend"]["triton"]:
-      data["serving_backend"].pop("triton", None)
-    if not data["serving_backend"]:
-      data.pop("serving_backend", None)
-
-    # dump clarifai model
-    data["clarifai_model"].pop("field_maps", None)
-    data["clarifai_model"].pop("output_type", None)
-
-    return data
-
-  @classmethod
-  def custom_doc(cls):
-    msg = f"{cls.__doc__}\nWhere: \n\n"
-    for k, v in cls.__annotations__.items():
-      msg += f"* {k}:\n------\n {v.__doc__}\n"
-    return msg
-
-
-def read_yaml(path: str) -> dict:
-  with open(path, encoding="utf-8") as f:
-    config = yaml.safe_load(f)  # model dict
-  return config
-
-
-def parse_config(config: dict):
-  clarifai_model = config.get("clarifai_model", {})
-  serving_backend = config.get("serving_backend", {})
-  if serving_backend:
-    if serving_backend.get("triton", {}):
-      # parse triton input/output
-      triton = serving_backend["triton"]
-      input_triton_configs = triton.pop("input", {})
-      triton.update(
-          dict(input=[
-              InputConfig(
-                  name=input["name"],
-                  data_type=eval(f"DType.{input['data_type']}") if isinstance(
-                      input['data_type'], str) else input['data_type'],
-                  dims=input["dims"],
-                  optional=input.get("optional", False),
-              ) for input in input_triton_configs
-          ]))
-      output_triton_configs = triton.pop("output", {})
-      triton.update(
-          dict(output=[
-              OutputConfig(
-                  name=output["name"],
-                  data_type=eval(f"DType.{output['data_type']}") if isinstance(
-                      output['data_type'], str) else output['data_type'],
-                  dims=output["dims"],
-                  label_filename=output["label_filename"],
-              ) for output in output_triton_configs
-          ]))
-      serving_backend.update(dict(triton=TritonModelConfig(**triton)))
-    serving_backend = ServingBackendConfig(**serving_backend)
-
-  # parse field maps for deployment
-  field_maps = clarifai_model.pop("field_maps", {})
-  clarifai_model.update(dict(field_maps=FieldMapsConfig(**field_maps)))
-  # parse inference_parameters
-  inference_parameters = clarifai_model.pop("inference_parameters", [])
-  if inference_parameters is None:
-    inference_parameters = []
-  clarifai_model.update(
-      dict(inference_parameters=[InferParam(**each) for each in inference_parameters]))
-  # parse output type
-  output_type = clarifai_model.pop("output_type", None)
-  if output_type:
-    #if isinstance(output_type, str):
-    #  output_type = eval(output_type)
-    clarifai_model.update(dict(output_type=output_type))
-
-  clarifai_model = ClarifaiModelConfig(**clarifai_model)
-
-  return ModelConfigClass(clarifai_model=clarifai_model, serving_backend=serving_backend)
-
-
-def get_model_config(model_type: str) -> ModelConfigClass:
-  """
-  Get model config by model type
-
-  Args:
-
-    model_type (str): One of field value of ModelTypes
-
-  Return:
-    ModelConfigClass
-
-  ### Example:
-  >>> from clarifai.models.model_serving.model_config import get_model_config, ModelTypes
-  >>> cfg = get_model_config(ModelTypes.text_classifier)
-  >>> custom_triton_config = cfg.make_triton_model_config(**kwargs)
-
-  """
-  if model_type == "MODEL_TYPE_PLACEHOLDER":
-    logger.warning(
-        "Warning: A placeholder value has been detected for obtaining the model configuration. This will result in empty `ModelConfigClass` object."
-    )
-    return ModelConfigClass(clarifai_model=None, serving_backend=None)
-
-  import os
-  assert model_type in MODEL_TYPES, f"`model_type` must be in {MODEL_TYPES}"
-  cfg = read_yaml(
-      os.path.join(os.path.dirname(__file__), "model_types_config", f"{model_type}.yaml"))
-  cfg = parse_config(cfg)
-  cfg.clarifai_model.type = model_type
-  return cfg
-
-
-_model_types = ModelTypes()
-MODEL_TYPES = _model_types.all
-del _model_types
-
-
-def load_user_config(cfg_path: str) -> ModelConfigClass:
-  """Read `clarifai_config.yaml` in user working dir
-
-  Args:
-    cfg_path (str): path to config
-
-  Returns:
-    ModelConfigClass
-  """
-  cfg = read_yaml(cfg_path)
-  return _ensure_user_config(cfg)
-
-
-def _ensure_user_config(user_config: dict) -> ModelConfigClass:
-  """Ensure user config with default one
-
-  Args:
-    user_config (dict): ModelConfigClass as dict
-
-  Raises:
-    e: Exception when loading user config
-
-  Returns:
-    ModelConfigClass
-  """
-
-  try:
-    user_config_obj: ModelConfigClass = parse_config(user_config)
-  except Exception as e:
-    raise e
-
-  default_config = get_model_config(user_config_obj.clarifai_model.type)
-
-  for _model_cfg, value in asdict(user_config_obj.clarifai_model).items():
-
-    if value and _model_cfg != "field_maps":
-      setattr(default_config.clarifai_model, _model_cfg, value)
-
-  if hasattr(user_config_obj, "serving_backend"):
-    if hasattr(user_config_obj.serving_backend, "triton"):
-      if user_config_obj.serving_backend.triton.max_batch_size > 1:
-        default_config.serving_backend.triton.max_batch_size = user_config_obj.serving_backend.triton.max_batch_size
-      if user_config_obj.serving_backend.triton.image_shape != [-1, -1]:
-        default_config.serving_backend.triton.image_shape = user_config_obj.serving_backend.triton.image_shape
-
-  return default_config
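For reference, the removed `get_model_config` carries its own usage example in the docstring above. A minimal, self-contained sketch of that flow is shown below; it is only valid against clarifai 10.8.3 or earlier, where `clarifai.models.model_serving` still ships, and the model name and version are hypothetical.

# Usage sketch for the removed API (clarifai <= 10.8.3 only).
from clarifai.models.model_serving.model_config import ModelTypes, get_model_config

# Load the default config bundled for a given model type ...
cfg = get_model_config(ModelTypes.text_classifier)

# ... then derive a Triton model config from it. model_name/model_version are
# illustrative; the remaining keyword arguments fall back to the dataclass defaults.
triton_cfg = cfg.make_triton_model_config(
    model_name="my-text-classifier",  # hypothetical model name
    model_version="1",
    max_batch_size=4,
)
print(triton_cfg.input, triton_cfg.output)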
--- clarifai/models/model_serving/model_config/inference_parameter.py
+++ /dev/null
@@ -1,129 +0,0 @@
-import json
-from dataclasses import asdict, dataclass, field
-from typing import Any, List
-
-
-@dataclass(frozen=True)
-class InferParamType:
-  BOOL: int = 1
-  STRING: int = 2
-  NUMBER: int = 3
-  ENCRYPTED_STRING: int = 21
-
-
-@dataclass
-class InferParam:
-  path: str
-  field_type: InferParamType = field(default_factory=InferParamType)
-  default_value: Any = None
-  description: str = ""
-
-  def __post_init__(self):
-    assert self.path.isidentifier(
-    ), f"`path` must be valid for creating python variable, got {self.path}"
-    if self.default_value is not None:
-      self.validate_type(self.default_value)
-
-  def validate_type(self, value):
-    if self.field_type == InferParamType.BOOL:
-      assert isinstance(value, bool), f"`field_type` is `BOOL` (bool), however got {type(value)}"
-    elif self.field_type == InferParamType.NUMBER:
-      assert isinstance(value, float) or isinstance(
-          value, int), f"`field_type` is `NUMBER` (float or int), however got {type(value)}"
-    else:
-      assert isinstance(
-          value,
-          str), f"`field_type` is `STRING` or `ENCRYPTED_STRING` (str), however got {type(value)}"
-
-  def __setattr__(self, __name: str, __value: Any) -> None:
-    if __name == "default_value":
-      self.validate_type(__value)
-    super().__setattr__(__name, __value)
-
-  def todict(self):
-    return {k: v for k, v in asdict(self).items()}
-
-
-@dataclass
-class InferParamManager:
-  json_path: str = ""
-  params: List[InferParam] = field(default_factory=list)
-  _dict_params: dict = field(init=False)
-
-  @classmethod
-  def from_kwargs(cls, **kwargs):
-    params = list()
-    for k, v in kwargs.items():
-      if isinstance(v, str) and k.startswith("_"):
-        _type = InferParamType.ENCRYPTED_STRING
-      elif isinstance(v, str):
-        _type = InferParamType.STRING
-      elif isinstance(v, bool):
-        _type = InferParamType.BOOL
-      elif isinstance(v, float) or isinstance(v, int):
-        _type = InferParamType.NUMBER
-      else:
-        raise TypeError(f"Unsupported type {type(v)} of argument {k}, support {InferParamType}")
-      param = InferParam(path=k, field_type=_type, default_value=v, description=k)
-      params.append(param)
-
-    return cls(params=params)
-
-  def __post_init__(self):
-    #assert self.params == [] or self.json_path, "`json_path` or `params` must be set"
-    self._dict_params = dict()
-    if self.params == [] and self.json_path:
-      with open(self.json_path, "r") as fp:
-        objs = json.load(fp)
-      objs = objs if isinstance(objs, list) else [objs]
-      self.params = [InferParam(**obj) for obj in objs]
-    for param in self.params:
-      self._dict_params.update({param.path: param})
-
-  def get_list_params(self):
-    list_params = []
-    for each in self.params:
-      list_params.append(each.todict())
-    return list_params
-
-  def export(self, path: str):
-    list_params = self.get_list_params()
-    with open(path, "w") as fp:
-      json.dump(list_params, fp, indent=2)
-
-  def validate(self, **kwargs) -> dict:
-    output_kwargs = {k: v.default_value for k, v in self._dict_params.items()}
-    assert kwargs == {} or self.params != [], "kwargs are rejected since `params` is empty"
-
-    for key, value in kwargs.items():
-      assert key in self._dict_params, f"param `{key}` is not in setting: {list(self._dict_params.keys())}"
-      if key in self._dict_params:
-        self._dict_params[key].validate_type(value)
-        output_kwargs.update({key: value})
-    return output_kwargs
-
-
-def is_number(v: str):
-  try:
-    _ = float(v)
-    return True
-  except ValueError:
-    return False
-
-
-def str_to_number(v: str):
-  try:
-    return int(v)
-  except ValueError:
-    return float(v)
-
-
-def parse_req_parameters(req_params: str):
-  req_params = json.loads(req_params)
-  for k, v in req_params.items():
-    if isinstance(v, str):
-      if is_number(v):
-        v = str_to_number(v)
-        req_params.update({k: v})
-
-  return req_params
--- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml
+++ /dev/null
@@ -1,25 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: image
-      data_type: TYPE_UINT8
-      dims: [-1, -1, 3]
-      optional: true
-    - name: text
-      data_type: TYPE_STRING
-      dims: [1]
-      optional: true
-    output:
-    - name: embeddings
-      data_type: TYPE_FP32
-      dims: [-1]
-      label_filename: null
-clarifai_model:
-  type: multimodal-embedder
-  output_type: EmbeddingOutput
-  field_maps:
-    input_fields_map:
-      image: image
-      text: text
-    output_fields_map:
-      embeddings: embeddings
--- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: text
-      data_type: TYPE_STRING
-      dims: [1]
-    output:
-    - name: softmax_predictions
-      data_type: TYPE_FP32
-      dims: [-1]
-      label_filename: "labels.txt"
-clarifai_model:
-  type: text-classifier
-  output_type: ClassifierOutput
-  field_maps:
-    input_fields_map:
-      text: text
-    output_fields_map:
-      concepts: softmax_predictions
--- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml
+++ /dev/null
@@ -1,20 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: text
-      data_type: TYPE_STRING
-      dims: [1]
-    output:
-    - name: embeddings
-      data_type: TYPE_FP32
-      dims: [-1]
-      label_filename: null
-
-clarifai_model:
-  type: text-embedder
-  output_type: EmbeddingOutput
-  field_maps:
-    input_fields_map:
-      text: text
-    output_fields_map:
-      embeddings: embeddings
--- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: text
-      data_type: TYPE_STRING
-      dims: [1]
-    output:
-    - name: image
-      data_type: TYPE_UINT8
-      dims: [-1, -1, 3]
-      label_filename: null
-clarifai_model:
-  type: text-to-image
-  output_type: ImageOutput
-  field_maps:
-    input_fields_map:
-      text: text
-    output_fields_map:
-      image: image
--- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: text
-      data_type: TYPE_STRING
-      dims: [1]
-    output:
-    - name: text
-      data_type: TYPE_STRING
-      dims: [1]
-      label_filename: null
-clarifai_model:
-  type: text-to-text
-  output_type: TextOutput
-  field_maps:
-    input_fields_map:
-      text: text
-    output_fields_map:
-      text: text
--- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml
+++ /dev/null
@@ -1,22 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: image
-      data_type: TYPE_UINT8
-      dims: [-1, -1, 3]
-    output:
-    - name: softmax_predictions
-      data_type: TYPE_FP32
-      dims: [-1]
-      label_filename: "labels.txt"
-
-clarifai_model:
-  field_maps:
-    input_fields_map:
-      image: image
-    output_fields_map:
-      concepts: softmax_predictions
-  output_type: ClassifierOutput
-  type: visual-classifier
-  labels:
-  inference_parameters:
--- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml
+++ /dev/null
@@ -1,32 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: image
-      data_type: TYPE_UINT8
-      dims: [-1, -1, 3]
-    output:
-    - name: predicted_bboxes
-      data_type: TYPE_FP32
-      dims: [-1, 4]
-      label_filename: null
-    - name: predicted_labels
-      data_type: TYPE_INT32
-      dims: [-1, 1]
-      label_filename: "labels.txt"
-    - name: predicted_scores
-      data_type: TYPE_FP32
-      dims: [-1, 1]
-      label_filename: null
-
-clarifai_model:
-  field_maps:
-    input_fields_map:
-      image: image
-    output_fields_map:
-      "regions[...].region_info.bounding_box": "predicted_bboxes"
-      "regions[...].data.concepts[...].id": "predicted_labels"
-      "regions[...].data.concepts[...].value": "predicted_scores"
-  output_type: VisualDetectorOutput
-  type: visual-detector
-  labels:
-  inference_parameters:
--- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: image
-      data_type: TYPE_UINT8
-      dims: [-1, -1, 3]
-    output:
-    - name: embeddings
-      data_type: TYPE_FP32
-      dims: [-1]
-      label_filename: null
-clarifai_model:
-  type: visual-embedder
-  output_type: EmbeddingOutput
-  field_maps:
-    input_fields_map:
-      image: image
-    output_fields_map:
-      embeddings: embeddings
--- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml
+++ /dev/null
@@ -1,19 +0,0 @@
-serving_backend:
-  triton:
-    input:
-    - name: image
-      data_type: TYPE_UINT8
-      dims: [-1, -1, 3]
-    output:
-    - name: predicted_mask
-      data_type: TYPE_INT64
-      dims: [-1, -1]
-      label_filename: "labels.txt"
-clarifai_model:
-  type: visual-segmenter
-  output_type: MasksOutput
-  field_maps:
-    input_fields_map:
-      image: image
-    output_fields_map:
-      "regions[...].region_info.mask,regions[...].data.concepts": "predicted_mask"