clarifai 11.7.5rc1__py3-none-any.whl → 11.8.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (222)
  1. clarifai/__init__.py +1 -1
  2. clarifai/client/user.py +0 -172
  3. clarifai/runners/models/model_builder.py +0 -133
  4. clarifai/runners/models/model_runner.py +21 -3
  5. clarifai/runners/models/openai_class.py +18 -0
  6. {clarifai-11.7.5rc1.dist-info → clarifai-11.8.1.dist-info}/METADATA +1 -1
  7. clarifai-11.8.1.dist-info/RECORD +129 -0
  8. {clarifai-11.7.5rc1.dist-info → clarifai-11.8.1.dist-info}/WHEEL +1 -1
  9. clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
  10. clarifai/__pycache__/__init__.cpython-312.pyc +0 -0
  11. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  12. clarifai/__pycache__/errors.cpython-311.pyc +0 -0
  13. clarifai/__pycache__/errors.cpython-39.pyc +0 -0
  14. clarifai/__pycache__/versions.cpython-311.pyc +0 -0
  15. clarifai/__pycache__/versions.cpython-39.pyc +0 -0
  16. clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  17. clarifai/cli/__pycache__/__init__.cpython-39.pyc +0 -0
  18. clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
  19. clarifai/cli/__pycache__/base.cpython-39.pyc +0 -0
  20. clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  21. clarifai/cli/__pycache__/compute_cluster.cpython-39.pyc +0 -0
  22. clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
  23. clarifai/cli/__pycache__/deployment.cpython-39.pyc +0 -0
  24. clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
  25. clarifai/cli/__pycache__/model.cpython-39.pyc +0 -0
  26. clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
  27. clarifai/cli/__pycache__/nodepool.cpython-39.pyc +0 -0
  28. clarifai/cli/__pycache__/pipeline.cpython-311.pyc +0 -0
  29. clarifai/cli/__pycache__/pipeline_step.cpython-311.pyc +0 -0
  30. clarifai/cli/model_templates.py +0 -243
  31. clarifai/cli/pipeline_step_templates.py +0 -64
  32. clarifai/cli/templates/__pycache__/__init__.cpython-311.pyc +0 -0
  33. clarifai/cli/templates/__pycache__/pipeline_templates.cpython-311.pyc +0 -0
  34. clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
  35. clarifai/client/__pycache__/__init__.cpython-312.pyc +0 -0
  36. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  37. clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
  38. clarifai/client/__pycache__/app.cpython-312.pyc +0 -0
  39. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  40. clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
  41. clarifai/client/__pycache__/base.cpython-39.pyc +0 -0
  42. clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  43. clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
  44. clarifai/client/__pycache__/dataset.cpython-39.pyc +0 -0
  45. clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
  46. clarifai/client/__pycache__/deployment.cpython-39.pyc +0 -0
  47. clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
  48. clarifai/client/__pycache__/input.cpython-39.pyc +0 -0
  49. clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
  50. clarifai/client/__pycache__/lister.cpython-39.pyc +0 -0
  51. clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
  52. clarifai/client/__pycache__/model.cpython-39.pyc +0 -0
  53. clarifai/client/__pycache__/model_client.cpython-311.pyc +0 -0
  54. clarifai/client/__pycache__/model_client.cpython-39.pyc +0 -0
  55. clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
  56. clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
  57. clarifai/client/__pycache__/pipeline.cpython-311.pyc +0 -0
  58. clarifai/client/__pycache__/pipeline_step.cpython-311.pyc +0 -0
  59. clarifai/client/__pycache__/runner.cpython-311.pyc +0 -0
  60. clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
  61. clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
  62. clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
  63. clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
  64. clarifai/client/auth/__pycache__/__init__.cpython-39.pyc +0 -0
  65. clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
  66. clarifai/client/auth/__pycache__/helper.cpython-39.pyc +0 -0
  67. clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
  68. clarifai/client/auth/__pycache__/register.cpython-39.pyc +0 -0
  69. clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
  70. clarifai/client/auth/__pycache__/stub.cpython-39.pyc +0 -0
  71. clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
  72. clarifai/constants/__pycache__/base.cpython-39.pyc +0 -0
  73. clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
  74. clarifai/constants/__pycache__/dataset.cpython-39.pyc +0 -0
  75. clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
  76. clarifai/constants/__pycache__/input.cpython-39.pyc +0 -0
  77. clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
  78. clarifai/constants/__pycache__/model.cpython-39.pyc +0 -0
  79. clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
  80. clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
  81. clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
  82. clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
  83. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  84. clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
  85. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  86. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
  87. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-39.pyc +0 -0
  88. clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
  89. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  90. clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
  91. clarifai/datasets/upload/__pycache__/base.cpython-39.pyc +0 -0
  92. clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
  93. clarifai/datasets/upload/__pycache__/features.cpython-39.pyc +0 -0
  94. clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
  95. clarifai/datasets/upload/__pycache__/image.cpython-39.pyc +0 -0
  96. clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
  97. clarifai/datasets/upload/__pycache__/multimodal.cpython-39.pyc +0 -0
  98. clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
  99. clarifai/datasets/upload/__pycache__/text.cpython-39.pyc +0 -0
  100. clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
  101. clarifai/datasets/upload/__pycache__/utils.cpython-39.pyc +0 -0
  102. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
  103. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
  104. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
  105. clarifai/models/model_serving/README.md +0 -158
  106. clarifai/models/model_serving/__init__.py +0 -14
  107. clarifai/models/model_serving/cli/__init__.py +0 -12
  108. clarifai/models/model_serving/cli/_utils.py +0 -53
  109. clarifai/models/model_serving/cli/base.py +0 -14
  110. clarifai/models/model_serving/cli/build.py +0 -79
  111. clarifai/models/model_serving/cli/clarifai_clis.py +0 -33
  112. clarifai/models/model_serving/cli/create.py +0 -171
  113. clarifai/models/model_serving/cli/example_cli.py +0 -34
  114. clarifai/models/model_serving/cli/login.py +0 -26
  115. clarifai/models/model_serving/cli/upload.py +0 -179
  116. clarifai/models/model_serving/constants.py +0 -21
  117. clarifai/models/model_serving/docs/cli.md +0 -161
  118. clarifai/models/model_serving/docs/concepts.md +0 -229
  119. clarifai/models/model_serving/docs/dependencies.md +0 -11
  120. clarifai/models/model_serving/docs/inference_parameters.md +0 -139
  121. clarifai/models/model_serving/docs/model_types.md +0 -19
  122. clarifai/models/model_serving/model_config/__init__.py +0 -16
  123. clarifai/models/model_serving/model_config/base.py +0 -369
  124. clarifai/models/model_serving/model_config/config.py +0 -312
  125. clarifai/models/model_serving/model_config/inference_parameter.py +0 -129
  126. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -25
  127. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -19
  128. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -20
  129. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -19
  130. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -19
  131. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -22
  132. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -32
  133. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -19
  134. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -19
  135. clarifai/models/model_serving/model_config/output.py +0 -133
  136. clarifai/models/model_serving/model_config/triton/__init__.py +0 -14
  137. clarifai/models/model_serving/model_config/triton/serializer.py +0 -136
  138. clarifai/models/model_serving/model_config/triton/triton_config.py +0 -182
  139. clarifai/models/model_serving/model_config/triton/wrappers.py +0 -281
  140. clarifai/models/model_serving/repo_build/__init__.py +0 -14
  141. clarifai/models/model_serving/repo_build/build.py +0 -198
  142. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -2
  143. clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -169
  144. clarifai/models/model_serving/repo_build/static_files/inference.py +0 -26
  145. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -25
  146. clarifai/models/model_serving/repo_build/static_files/test.py +0 -40
  147. clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -75
  148. clarifai/models/model_serving/utils.py +0 -23
  149. clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
  150. clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
  151. clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
  152. clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
  153. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  154. clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
  155. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  156. clarifai/runners/models/__pycache__/dummy_openai_model.cpython-311.pyc +0 -0
  157. clarifai/runners/models/__pycache__/mcp_class.cpython-311.pyc +0 -0
  158. clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
  159. clarifai/runners/models/__pycache__/model_builder.cpython-39.pyc +0 -0
  160. clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
  161. clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
  162. clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
  163. clarifai/runners/models/__pycache__/model_servicer.cpython-311.pyc +0 -0
  164. clarifai/runners/models/__pycache__/openai_class.cpython-311.pyc +0 -0
  165. clarifai/runners/models/base_typed_model.py +0 -238
  166. clarifai/runners/models/model_upload.py +0 -607
  167. clarifai/runners/pipeline_steps/__pycache__/__init__.cpython-311.pyc +0 -0
  168. clarifai/runners/pipeline_steps/__pycache__/pipeline_step_builder.cpython-311.pyc +0 -0
  169. clarifai/runners/pipelines/__pycache__/__init__.cpython-311.pyc +0 -0
  170. clarifai/runners/pipelines/__pycache__/pipeline_builder.cpython-311.pyc +0 -0
  171. clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  172. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  173. clarifai/runners/utils/__pycache__/code_script.cpython-311.pyc +0 -0
  174. clarifai/runners/utils/__pycache__/code_script.cpython-39.pyc +0 -0
  175. clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
  176. clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
  177. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  178. clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
  179. clarifai/runners/utils/__pycache__/method_signatures.cpython-311.pyc +0 -0
  180. clarifai/runners/utils/__pycache__/model_utils.cpython-311.pyc +0 -0
  181. clarifai/runners/utils/__pycache__/openai_convertor.cpython-311.pyc +0 -0
  182. clarifai/runners/utils/__pycache__/pipeline_validation.cpython-311.pyc +0 -0
  183. clarifai/runners/utils/__pycache__/serializers.cpython-311.pyc +0 -0
  184. clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
  185. clarifai/runners/utils/data_handler.py +0 -231
  186. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-311.pyc +0 -0
  187. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-39.pyc +0 -0
  188. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-311.pyc +0 -0
  189. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-39.pyc +0 -0
  190. clarifai/runners/utils/data_types.py +0 -471
  191. clarifai/runners/utils/temp.py +0 -59
  192. clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
  193. clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
  194. clarifai/urls/__pycache__/helper.cpython-39.pyc +0 -0
  195. clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  196. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  197. clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
  198. clarifai/utils/__pycache__/cli.cpython-39.pyc +0 -0
  199. clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
  200. clarifai/utils/__pycache__/config.cpython-39.pyc +0 -0
  201. clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
  202. clarifai/utils/__pycache__/constants.cpython-39.pyc +0 -0
  203. clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
  204. clarifai/utils/__pycache__/logging.cpython-39.pyc +0 -0
  205. clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
  206. clarifai/utils/__pycache__/misc.cpython-39.pyc +0 -0
  207. clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
  208. clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
  209. clarifai/utils/__pycache__/protobuf.cpython-39.pyc +0 -0
  210. clarifai/utils/__pycache__/secrets.cpython-311.pyc +0 -0
  211. clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
  212. clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
  213. clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
  214. clarifai/utils/evaluation/__pycache__/testset_annotation_parser.cpython-311.pyc +0 -0
  215. clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
  216. clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
  217. clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
  218. clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
  219. clarifai-11.7.5rc1.dist-info/RECORD +0 -339
  220. {clarifai-11.7.5rc1.dist-info → clarifai-11.8.1.dist-info}/entry_points.txt +0 -0
  221. {clarifai-11.7.5rc1.dist-info → clarifai-11.8.1.dist-info}/licenses/LICENSE +0 -0
  222. {clarifai-11.7.5rc1.dist-info → clarifai-11.8.1.dist-info}/top_level.txt +0 -0
@@ -1,169 +0,0 @@
1
- import os
2
- from copy import deepcopy
3
- from typing import Dict, Iterable, List, Union
4
-
5
- import numpy as np
6
- import yaml
7
-
8
- from ...constants import IMAGE_TENSOR_NAME, TEXT_TENSOR_NAME
9
- from ...model_config import (ClassifierOutput, EmbeddingOutput, ImageOutput, InferParam,
10
- InferParamManager, MasksOutput, ModelTypes, TextOutput,
11
- VisualDetector, load_user_config)
12
-
13
- _default_texts = ["Photo of a cat", "A cat is playing around", "Hello, this is test"]
14
-
15
- _default_images = [
16
- np.zeros((100, 100, 3), dtype='uint8'), #black
17
- np.ones((100, 100, 3), dtype='uint8') * 255, #white
18
- np.random.uniform(0, 255, (100, 100, 3)).astype('uint8') #noise
19
- ]
20
-
21
-
22
- def _is_valid_logit(x: np.array):
23
- return np.all(0 <= x) and np.all(x <= 1)
24
-
25
-
26
- def _is_non_negative(x: np.array):
27
- return np.all(x >= 0)
28
-
29
-
30
- def _is_integer(x):
31
- return np.all(np.equal(np.mod(x, 1), 0))
32
-
33
-
34
- class BaseTest:
35
- init_inference_parameters = {}
36
-
37
- def __init__(self, init_inference_parameters={}) -> None:
38
- import sys
39
- if 'inference' in sys.modules:
40
- del sys.modules['inference']
41
- import inference
42
- from inference import InferenceModel
43
- self.model = InferenceModel()
44
- self._base_dir = os.path.dirname(inference.__file__)
45
- self.cfg_path = os.path.join(self._base_dir, "clarifai_config.yaml")
46
- self.user_config = load_user_config(self.cfg_path)
47
- self._user_labels = None
48
- # check if labels exists
49
- for output_config in self.user_config.serving_backend.triton.output:
50
- if output_config.label_filename:
51
- self._user_labels = self.user_config.clarifai_model.labels
52
- assert self._user_labels, f"Model type `{self.user_config.clarifai_model.type}` requires labels, "\
53
- f"but can not found value of `clarifai_model.labels` in {self.cfg_path}. Please update this attribute to build the model"
54
-
55
- # update init vs user_defined params
56
- user_defined_infer_params = [
57
- InferParam(**each) for each in self.user_config.clarifai_model.inference_parameters
58
- ]
59
- total_infer_params = []
60
- if init_inference_parameters:
61
- self.init_inference_parameters = init_inference_parameters
62
- for k, v in self.init_inference_parameters.items():
63
- _exist = False
64
- for user_param in user_defined_infer_params:
65
- if user_param.path == k:
66
- if user_param.default_value != v:
67
- print(f"Warning: Overwrite parameter `{k}` with default value `{v}`")
68
- user_param.default_value = v
69
- _exist = True
70
- total_infer_params.append(user_param)
71
- user_defined_infer_params.remove(user_param)
72
- break
73
- if not _exist:
74
- total_infer_params.append(InferParamManager.from_kwargs(**{k: v}).params[0])
75
-
76
- self.infer_param_manager = InferParamManager(
77
- params=total_infer_params + user_defined_infer_params)
78
- self.user_config.clarifai_model.inference_parameters = self.infer_param_manager.get_list_params(
79
- )
80
- self._overwrite_cfg()
81
-
82
- @property
83
- def user_labels(self):
84
- return self._user_labels
85
-
86
- def _overwrite_cfg(self):
87
- config = yaml.dump(self.user_config.dump_to_user_config(),)
88
- with open(self.cfg_path, "w") as f:
89
- f.write(config)
90
-
91
- def predict(self, input_data: Union[List[np.ndarray], List[str], Dict[str, Union[List[
92
- np.ndarray], List[str]]]], **inference_parameters) -> Iterable:
93
- """
94
- Test Prediction method is exact `InferenceModel.predict` method with
95
- checking inference paramters.
96
-
97
- Args:
98
- -----
99
- - input_data: A list of input data item to predict on. The type depends on model input type:
100
- * `image`: List[np.ndarray]
101
- * `text`: List[str]
102
- * `multimodal`:
103
- input_data is list of dict where key is input type name e.i. `image`, `text` and value is list.
104
- {"image": List[np.ndarray], "text": List[str]}
105
-
106
- - **inference_parameters: keyword args of your inference parameters.
107
-
108
- Returns:
109
- --------
110
- List of your inference model output type
111
- """
112
- infer_params = self.infer_param_manager.validate(**inference_parameters)
113
- outputs = self.model.predict(input_data=input_data, inference_parameters=infer_params)
114
- outputs = self._verify_outputs(outputs)
115
- return outputs
116
-
117
- def _verify_outputs(self, outputs: List[Union[ClassifierOutput, VisualDetector, EmbeddingOutput,
118
- TextOutput, ImageOutput, MasksOutput]]):
119
- """Test output value/dims
120
-
121
- Args:
122
- outputs (List[Union[ClassifierOutput, VisualDetector, EmbeddingOutput, TextOutput, ImageOutput, MasksOutput]]): Outputs of `predict` method
123
- """
124
- _outputs = deepcopy(outputs)
125
- _output = _outputs[0]
126
-
127
- if isinstance(_output, EmbeddingOutput):
128
- # not test
129
- pass
130
- elif isinstance(_output, ClassifierOutput):
131
- for each in _outputs:
132
- assert _is_valid_logit(each.predicted_scores), "`predicted_scores` must be in range [0, 1]"
133
- assert len(each.predicted_scores) == len(
134
- self.user_labels
135
- ), f"`predicted_scores` dim must be equal to labels, got {len(each.predicted_scores)} != labels {len(self.user_labels)}"
136
- elif isinstance(_output, VisualDetector):
137
- for each in _outputs:
138
- assert _is_valid_logit(each.predicted_scores), "`predicted_scores` must be in range [0, 1]"
139
- assert _is_integer(each.predicted_labels), "`predicted_labels` must be integer"
140
- assert np.all(0 <= each.predicted_labels) and np.all(each.predicted_labels < len(
141
- self.user_labels)), f"`predicted_labels` must be in [0, {len(self.user_labels) - 1}]"
142
- assert _is_non_negative(each.predicted_bboxes), "`predicted_bboxes` must be >= 0"
143
- elif isinstance(_output, MasksOutput):
144
- for each in _outputs:
145
- assert np.all(0 <= each.predicted_mask) and np.all(each.predicted_mask < len(
146
- self.user_labels)), f"`predicted_mask` must be in [0, {len(self.user_labels) - 1}]"
147
- elif isinstance(_output, TextOutput):
148
- pass
149
- elif isinstance(_output, ImageOutput):
150
- for each in _outputs:
151
- assert _is_non_negative(each.image), "`image` must be >= 0"
152
- else:
153
- pass
154
-
155
- return outputs
156
-
157
- def test_with_default_inputs(self):
158
- model_type = self.user_config.clarifai_model.type
159
- if model_type == ModelTypes.multimodal_embedder:
160
- self.predict(input_data=[{IMAGE_TENSOR_NAME: each} for each in _default_images])
161
- self.predict(input_data=[{TEXT_TENSOR_NAME: each} for each in _default_texts])
162
- self.predict(input_data=[{
163
- TEXT_TENSOR_NAME: text,
164
- IMAGE_TENSOR_NAME: img
165
- } for text, img in zip(_default_texts, _default_images)])
166
- elif model_type.startswith("visual"):
167
- self.predict(input_data=_default_images)
168
- else:
169
- self.predict(input_data=_default_texts)
@@ -1,26 +0,0 @@
1
- # User model inference script.
2
-
3
- import os
4
- from pathlib import Path
5
- from typing import Dict, Union
6
- from clarifai.models.model_serving.model_config import * # noqa
7
-
8
-
9
- class InferenceModel():
10
- """User model inference class."""
11
-
12
- def __init__(self) -> None:
13
- """
14
- Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
15
- in this method so they are loaded only once for faster inference.
16
- """
17
- # current directory
18
- self.base_path: Path = os.path.dirname(__file__)
19
-
20
- def predict(self,
21
- input_data: list,
22
- inference_parameters: Dict[str, Union[bool, str, float, int]] = {}) -> list:
23
- """predict_docstring
24
- """
25
-
26
- raise NotImplementedError()
@@ -1,25 +0,0 @@
1
- # Sample config of inference_parameters and labels
2
- # For detail, please refer to docs
3
- # --------------------
4
- # inference_parameters:
5
- # - path: boolean_var
6
- # default_value: true
7
- # field_type: 1
8
- # description: a boolean variable
9
- # - path: string_var
10
- # default_value: "a string"
11
- # field_type: 2
12
- # description: a string variable
13
- # - path: number_var
14
- # default_value: 1
15
- # field_type: 3
16
- # description: a number variable
17
- # - path: secret_string_var
18
- # default_value: "YOUR_SECRET"
19
- # field_type: 21
20
- # description: a string variable contains secret like API key
21
- # labels:
22
- # - concept1
23
- # - concept2
24
- # - concept3
25
- # - concept4
@@ -1,40 +0,0 @@
1
- import unittest
2
-
3
- from clarifai.models.model_serving.repo_build import BaseTest
4
-
5
-
6
- class CustomTest(unittest.TestCase):
7
- """
8
- BaseTest loads the InferenceModel from the inference.py file in the current working directory.
9
- To execute the predict method of the InferenceModel, use the predict method in BaseTest.
10
- It takes the exact same inputs and inference parameters, returning the same outputs as InferenceModel.predict.
11
- The difference is that BaseTest.predict verifies your_infer_parameters against config.clarifai_models.inference_parameters and checks the output values.
12
-
13
- For example, test input value of visual-classifier
14
-
15
- def test_input(self):
16
- import cv2
17
- path = "path/to/image"
18
- img = cv2.imread(path)
19
- outputs = self.model.predict([img], infer_param1=..., infer_param2=...)
20
- print(outputs)
21
- assert outputs
22
-
23
- """
24
-
25
- def setUp(self) -> None:
26
- your_infer_parameter = dict(
27
- ) # for example dict(float_var=0.12, string_var="test", _secret_string_var="secret")
28
- self.model = BaseTest(your_infer_parameter)
29
-
30
- def test_default_cases(self):
31
- """Test your model with dummy inputs.
32
- In general, you only need to run this test to check your InferneceModel implementation.
33
- In case the default inputs makes your model failed for some reason (not because of assert in `test_with_default_inputs`),
34
- you can comment out this test.
35
- """
36
- self.model.test_with_default_inputs()
37
-
38
- def test_specific_case1(self):
39
- """ Implement your test case"""
40
- pass
@@ -1,75 +0,0 @@
1
- # Copyright 2023 Clarifai, Inc.
2
- # Licensed under the Apache License, Version 2.0 (the "License");
3
- # you may not use this file except in compliance with the License.
4
- # You may obtain a copy of the License at
5
- #
6
- # http://www.apache.org/licenses/LICENSE-2.0
7
- #
8
- # Unless required by applicable law or agreed to in writing, software
9
- # distributed under the License is distributed on an "AS IS" BASIS,
10
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
- # See the License for the specific language governing permissions and
12
- # limitations under the License.
13
- """Triton inference server Python Backend Model."""
14
-
15
- import os
16
- import sys
17
-
18
- try:
19
- import triton_python_backend_utils as pb_utils
20
- except ModuleNotFoundError:
21
- pass
22
- from clarifai.models.model_serving.model_config.inference_parameter import parse_req_parameters
23
-
24
-
25
- class TritonPythonModel:
26
- """
27
- Triton Python BE Model.
28
- """
29
-
30
- def initialize(self, args):
31
- """
32
- Triton server init.
33
- """
34
- sys.path.append(os.path.dirname(__file__))
35
- from inference import InferenceModel
36
-
37
- self.inference_obj = InferenceModel()
38
-
39
- # Read input_name from config file
40
- self.input_names = [inp.name for inp in self.inference_obj.config.serving_backend.triton.input]
41
-
42
- def execute(self, requests):
43
- """
44
- Serve model inference requests.
45
- """
46
- responses = []
47
-
48
- for request in requests:
49
- try:
50
- parameters = request.parameters()
51
- except Exception:
52
- print(
53
- "It seems this triton version does not support `parameters()` in request. "
54
- "Please upgrade tritonserver version otherwise can not use `inference_parameters`. Error message: {e}"
55
- )
56
- parameters = None
57
-
58
- parameters = parse_req_parameters(parameters) if parameters else {}
59
-
60
- if len(self.input_names) == 1:
61
- in_batch = pb_utils.get_input_tensor_by_name(request, self.input_names[0])
62
- in_batch = in_batch.as_numpy()
63
- data = in_batch
64
- else:
65
- data = {}
66
- for input_name in self.input_names:
67
- in_batch = pb_utils.get_input_tensor_by_name(request, input_name)
68
- in_batch = in_batch.as_numpy() if in_batch is not None else []
69
- data.update({input_name: in_batch})
70
-
71
- inference_response = self.inference_obj._tritonserver_predict(
72
- input_data=data, inference_parameters=parameters)
73
- responses.append(inference_response)
74
-
75
- return responses
@@ -1,23 +0,0 @@
1
- import os
2
-
3
- from .constants import CLARIFAI_PAT_PATH
4
-
5
-
6
- def _persist_pat(pat: str):
7
- """ Write down pat to CLARIFAI_PAT_PATH """
8
- with open(CLARIFAI_PAT_PATH, "w") as f:
9
- f.write(pat)
10
-
11
-
12
- def _read_pat():
13
- if not os.path.exists(CLARIFAI_PAT_PATH):
14
- return None
15
- with open(CLARIFAI_PAT_PATH, "r") as f:
16
- return f.read().replace("\n", "").replace("\r", "").strip()
17
-
18
-
19
- def login(pat=None):
20
- """ if pat provided, set pat to CLARIFAI_PAT otherwise read pat from file"""
21
- pat = pat or _read_pat()
22
- assert pat, Exception("PAT is not found, please run `clarifai login` to persist your PAT")
23
- os.environ["CLARIFAI_PAT"] = pat
@@ -1,238 +0,0 @@
1
- import itertools
2
- from typing import Any, Dict, Iterator, List, Tuple
3
-
4
- import numpy as np
5
- from clarifai_grpc.grpc.api import resources_pb2, service_pb2
6
- from clarifai_grpc.grpc.api.service_pb2 import PostModelOutputsRequest
7
- from google.protobuf import json_format
8
-
9
- from ..utils.data_handler import InputDataHandler, OutputDataHandler
10
- from .model_runner import ModelRunner
11
-
12
-
13
- class AnyAnyModel(ModelRunner):
14
-
15
- def load_model(self):
16
- """
17
- Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
18
- in this method so they are loaded only once for faster inference.
19
- """
20
- raise NotImplementedError
21
-
22
- def parse_input_request(
23
- self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
24
- list_input_dict = [
25
- InputDataHandler.from_proto(input).to_python() for input in input_request.inputs
26
- ]
27
- inference_params = json_format.MessageToDict(
28
- input_request.model.model_version.output_info.params)
29
-
30
- return list_input_dict, inference_params
31
-
32
- def convert_output_to_proto(self, outputs: list):
33
- assert (isinstance(outputs, Iterator) or isinstance(outputs, list) or
34
- isinstance(outputs, tuple)), "outputs must be an Iterator"
35
- output_protos = []
36
- for output in outputs:
37
- if isinstance(output, OutputDataHandler):
38
- output = output.proto
39
- elif isinstance(output, resources_pb2.Output):
40
- pass
41
- else:
42
- raise NotImplementedError
43
- output_protos.append(output)
44
-
45
- return service_pb2.MultiOutputResponse(outputs=output_protos)
46
-
47
- def predict_wrapper(
48
- self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
49
- list_dict_input, inference_params = self.parse_input_request(request)
50
- outputs = self.predict(list_dict_input, inference_parameters=inference_params)
51
- return self.convert_output_to_proto(outputs)
52
-
53
- def generate_wrapper(
54
- self, request: PostModelOutputsRequest) -> Iterator[service_pb2.MultiOutputResponse]:
55
- list_dict_input, inference_params = self.parse_input_request(request)
56
- outputs = self.generate(list_dict_input, inference_parameters=inference_params)
57
- for output in outputs:
58
- yield self.convert_output_to_proto(output)
59
-
60
- def _preprocess_stream(
61
- self, request: Iterator[PostModelOutputsRequest]) -> Iterator[Tuple[List[Dict], List[Dict]]]:
62
- """Return generator of processed data (from proto to python) and inference parameters like predict and generate"""
63
- for i, req in enumerate(request):
64
- input_data, _ = self.parse_input_request(req)
65
- yield input_data
66
-
67
- def stream_wrapper(self, request: Iterator[PostModelOutputsRequest]
68
- ) -> Iterator[service_pb2.MultiOutputResponse]:
69
- first_request = next(request)
70
- _, inference_params = self.parse_input_request(first_request)
71
- request_iterator = itertools.chain([first_request], request)
72
- outputs = self.stream(self._preprocess_stream(request_iterator), inference_params)
73
- for output in outputs:
74
- yield self.convert_output_to_proto(output)
75
-
76
- def predict(self, input_data: List[Dict],
77
- inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
78
- """
79
- Prediction method.
80
-
81
- Args:
82
- -----
83
- - input_data: is list of dict where key is input type name.
84
- * image: np.ndarray
85
- * text: str
86
- * audio: bytes
87
-
88
- - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
89
-
90
- Returns:
91
- --------
92
- List of OutputDataHandler
93
- """
94
- raise NotImplementedError
95
-
96
- def generate(self, input_data: List[Dict],
97
- inference_parameters: Dict[str, Any] = {}) -> Iterator[List[OutputDataHandler]]:
98
- """
99
- Generate method.
100
-
101
- Args:
102
- -----
103
- - input_data: is list of dict where key is input type name.
104
- * image: np.ndarray
105
- * text: str
106
- * audio: bytes
107
-
108
- - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
109
-
110
- Yield:
111
- --------
112
- List of OutputDataHandler
113
- """
114
- raise NotImplementedError
115
-
116
- def stream(self, inputs: Iterator[List[Dict[str, Any]]],
117
- inference_params: Dict[str, Any]) -> Iterator[List[OutputDataHandler]]:
118
- """
119
- Stream method.
120
-
121
- Args:
122
- -----
123
- input_request: is an Iterator of Tuple which
124
- - First element (List[Dict[str, Union[np.ndarray, str, bytes]]]) is list of dict input data type which keys and values are:
125
- * image: np.ndarray
126
- * text: str
127
- * audio: bytes
128
-
129
- - Second element (Dict[str, Union[bool, str, float, int]]): is a dict of inference_parameters
130
-
131
- Yield:
132
- --------
133
- List of OutputDataHandler
134
- """
135
- raise NotImplementedError
136
-
137
-
138
- class VisualInputModel(AnyAnyModel):
139
-
140
- def parse_input_request(
141
- self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
142
- list_input_dict = [
143
- InputDataHandler.from_proto(input).image(format="np") for input in input_request.inputs
144
- ]
145
- inference_params = json_format.MessageToDict(
146
- input_request.model.model_version.output_info.params)
147
-
148
- return list_input_dict, inference_params
149
-
150
- def load_model(self):
151
- """
152
- Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
153
- in this method so they are loaded only once for faster inference.
154
- """
155
- raise NotImplementedError
156
-
157
- def predict(self, input_data: List[np.ndarray],
158
- inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
159
- """
160
- Prediction method.
161
-
162
- Args:
163
- -----
164
- - input_data(List[np.ndarray]): is list of image as np.ndarray type
165
- - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
166
-
167
- Returns:
168
- --------
169
- List of OutputDataHandler
170
- """
171
- raise NotImplementedError
172
-
173
-
174
- class TextInputModel(AnyAnyModel):
175
-
176
- def load_model(self):
177
- """
178
- Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
179
- in this method so they are loaded only once for faster inference.
180
- """
181
- raise NotImplementedError
182
-
183
- def parse_input_request(
184
- self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
185
- list_input_text = [InputDataHandler.from_proto(input).text for input in input_request.inputs]
186
- inference_params = json_format.MessageToDict(
187
- input_request.model.model_version.output_info.params)
188
-
189
- return list_input_text, inference_params
190
-
191
- def predict(self, input_data: List[str],
192
- inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
193
- """
194
- Prediction method.
195
-
196
- Args:
197
- -----
198
- - input_data(List[str]): is list of text as str type
199
- - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
200
-
201
- Returns:
202
- --------
203
- List of OutputDataHandler
204
- """
205
- raise NotImplementedError
206
-
207
- def generate(self, input_data: List[str],
208
- inference_parameters: Dict[str, Any] = {}) -> Iterator[List[OutputDataHandler]]:
209
- """
210
- Prediction method.
211
-
212
- Args:
213
- -----
214
- - input_data(List[str]): is list of text as str type
215
- - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
216
-
217
- Yield:
218
- --------
219
- List of OutputDataHandler
220
- """
221
- raise NotImplementedError
222
-
223
- def stream(self, inputs: Iterator[List[str]],
224
- inference_params: Dict[str, Any]) -> Iterator[List[OutputDataHandler]]:
225
- """
226
- Stream method.
227
-
228
- Args:
229
- -----
230
- input_request: is an Iterator of Tuple which
231
- - First element (List[str]) is list of input text:
232
- - Second element (Dict[str, Union[bool, str, float, int]]): is a dict of inference_parameters
233
-
234
- Yield:
235
- --------
236
- List of OutputDataHandler
237
- """
238
- raise NotImplementedError