clarifai 11.0.5__py3-none-any.whl → 11.0.6rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (156) hide show
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  4. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  5. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  6. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  11. clarifai/cli/model.py +15 -2
  12. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  13. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  14. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  15. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  16. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  17. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  18. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  19. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
  21. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  23. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  24. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  25. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  26. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  27. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  28. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  29. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  30. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  31. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  32. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  33. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  34. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  35. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  36. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  37. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  38. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  39. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  40. clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
  41. clarifai/models/model_serving/README.md +158 -0
  42. clarifai/models/model_serving/__init__.py +14 -0
  43. clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
  44. clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
  45. clarifai/models/model_serving/cli/__init__.py +12 -0
  46. clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  47. clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
  48. clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
  49. clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
  50. clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
  51. clarifai/models/model_serving/cli/_utils.py +53 -0
  52. clarifai/models/model_serving/cli/base.py +14 -0
  53. clarifai/models/model_serving/cli/build.py +79 -0
  54. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  55. clarifai/models/model_serving/cli/create.py +171 -0
  56. clarifai/models/model_serving/cli/example_cli.py +34 -0
  57. clarifai/models/model_serving/cli/login.py +26 -0
  58. clarifai/models/model_serving/cli/upload.py +183 -0
  59. clarifai/models/model_serving/constants.py +21 -0
  60. clarifai/models/model_serving/docs/cli.md +161 -0
  61. clarifai/models/model_serving/docs/concepts.md +229 -0
  62. clarifai/models/model_serving/docs/dependencies.md +11 -0
  63. clarifai/models/model_serving/docs/inference_parameters.md +139 -0
  64. clarifai/models/model_serving/docs/model_types.md +19 -0
  65. clarifai/models/model_serving/model_config/__init__.py +16 -0
  66. clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
  67. clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
  68. clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
  69. clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
  70. clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
  71. clarifai/models/model_serving/model_config/base.py +369 -0
  72. clarifai/models/model_serving/model_config/config.py +312 -0
  73. clarifai/models/model_serving/model_config/inference_parameter.py +129 -0
  74. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -0
  75. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -0
  76. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -0
  77. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -0
  78. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -0
  79. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -0
  80. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -0
  81. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -0
  82. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -0
  83. clarifai/models/model_serving/model_config/output.py +133 -0
  84. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  85. clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
  86. clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
  87. clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
  88. clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
  89. clarifai/models/model_serving/model_config/triton/serializer.py +136 -0
  90. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  91. clarifai/models/model_serving/model_config/triton/wrappers.py +281 -0
  92. clarifai/models/model_serving/repo_build/__init__.py +14 -0
  93. clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
  94. clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
  95. clarifai/models/model_serving/repo_build/build.py +198 -0
  96. clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
  97. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  98. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  99. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  100. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  101. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  102. clarifai/models/model_serving/repo_build/static_files/triton/model.py +75 -0
  103. clarifai/models/model_serving/utils.py +31 -0
  104. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  105. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  106. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  107. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  108. clarifai/runners/__pycache__/server.cpython-310.pyc +0 -0
  109. clarifai/runners/deepgram_live_transcribe.py +98 -0
  110. clarifai/runners/deepgram_live_transcribe.py~ +98 -0
  111. clarifai/runners/deepgram_runner.py +131 -0
  112. clarifai/runners/deepgram_runner.py~ +130 -0
  113. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
  114. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +79 -0
  115. clarifai/runners/dockerfile_template/Dockerfile.template +53 -29
  116. clarifai/runners/example_llama2.py~ +72 -0
  117. clarifai/runners/matt_example.py +89 -0
  118. clarifai/runners/matt_example.py~ +87 -0
  119. clarifai/runners/matt_llm_example.py +129 -0
  120. clarifai/runners/matt_llm_example.py~ +128 -0
  121. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  122. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  123. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  124. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  125. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  126. clarifai/runners/models/__pycache__/model_servicer.cpython-310.pyc +0 -0
  127. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  128. clarifai/runners/models/model_upload.py +7 -4
  129. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  130. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  131. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  132. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  133. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  134. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  135. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  136. clarifai/runners/utils/const.py +7 -6
  137. clarifai/runners/utils/loader.py +1 -0
  138. clarifai/runners/utils/logging.py +6 -0
  139. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  140. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  141. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  142. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  143. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  144. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  145. clarifai/utils/logging.py +1 -1
  146. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  147. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  148. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  149. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  150. {clarifai-11.0.5.dist-info → clarifai-11.0.6rc2.dist-info}/METADATA +16 -27
  151. clarifai-11.0.6rc2.dist-info/RECORD +242 -0
  152. {clarifai-11.0.5.dist-info → clarifai-11.0.6rc2.dist-info}/WHEEL +1 -1
  153. clarifai-11.0.5.dist-info/RECORD +0 -100
  154. {clarifai-11.0.5.dist-info → clarifai-11.0.6rc2.dist-info}/LICENSE +0 -0
  155. {clarifai-11.0.5.dist-info → clarifai-11.0.6rc2.dist-info}/entry_points.txt +0 -0
  156. {clarifai-11.0.5.dist-info → clarifai-11.0.6rc2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,229 @@
1
+ # Overview
2
+
3
+ Model Serving is a straightforward interface that links user model implementations in Python with a high-performance serving framework (tritonserver). It seamlessly integrates with the Clarifai Platform, allowing users to deploy their models without any prerequisites in the serving framework.
4
+
5
+ ```plaintext
6
+
7
+ |Model code in Python| ---> |Model Serving + Clarifai Platform| ---> |Served model|
8
+
9
+ ```
10
+
11
+ # Understanding the concepts
12
+
13
+ While functioning as an interface, it comes with certain constraints that must be adhered to throughout the process.
14
+
15
+ ## Model repository
16
+
17
+ First of all, the model repository structure obtained by running
18
+
19
+ ```bash
20
+ clarifai create model --type ... --working-dir ...
21
+ ```
22
+
23
+ In your working dir:
24
+
25
+ ```bash
26
+ ├── inference.py
27
+ ├── clarifai_config.yaml
28
+ ├── test.py
29
+ └── requirements.txt
30
+ ```
31
+ Where:
32
+
33
+ * [inference.py](): The crucial file where users need to implement their Python code.
34
+ * [clarifai_config.yaml](): Contains all necessary configurations for model `test`, `build` and `upload`
35
+ * [test.py](): Predefined test cases to evaluate `inference.py`.
36
+ * [requirements.txt](): Equivalent to a normal Python project's requirements.txt.
37
+
38
+ ## inference.py
39
+ Includes the ModelInference class, inherited from one of the Clarifai Models, providing utility wrapper functions and docstring to ensure that customized models work seamlessly within the platform server. The specific Clarifai Model is determined by the --type argument provided by users in the clarifai create model command.
40
+
41
+ Sample for `text-to-text` model
42
+
43
+ ```python
44
+ class InferenceModel(TextToText):
45
+ """User model inference class."""
46
+
47
+ def __init__(self) -> None:
48
+ """
49
+ Load inference time artifacts that are called frequently, e.g. models, tokenizers, etc.
50
+ in this method so they are loaded only once for faster inference.
51
+ """
52
+ # current directory
53
+ self.base_path: Path = os.path.dirname(__file__)
54
+
55
+ def predict(self, input_data: list,
56
+ inference_parameters: Dict[str, Union[str, float, int, bool]]) -> list:
57
+ """ Custom prediction function for `text-to-text` (also called as `text generation`) model.
58
+
59
+ Args:
60
+ input_data (List[str]): List of text
61
+ inference_parameters (Dict[str, Union[str, float, int, bool]]): your inference parameters
62
+
63
+ Returns:
64
+ list of TextOutput
65
+
66
+ """
67
+
68
+ raise NotImplementedError()
69
+
70
+ ```
71
+
72
+ Users are required to implement two functions:
73
+
74
+ * `__init__`: a method to load the model, called once.
75
+ * `predict`: a function designed to generate predictions based on the provided inputs and inference parameters. This method includes a docstring inherited from its parent, providing information on input, parameters, and output types. Refer to the docstring to confirm that the outputs of this method adhere to the correct [Clarifai Output Type](../model_config/output.py), as errors may occur otherwise.
76
+
77
+ When making predictions through the Clarifai API, user inputs are transmitted to input_data as a List of strings for text input or a List of NumPy arrays for RGB image input, where each array has a shape of [W, H, 3]. Additionally, all inference parameters are conveyed through the inference_parameters argument of the predict method.
78
+ ```plaintext
79
+
80
+ list of user inputs e.g. inference parameters e.g.
81
+ `text-to-text` will be {'top_k': 5, 'temperature': 0.7, 'do_sample': False, ...}
82
+ ['text', 'test text',] |
83
+ | |
84
+ | |
85
+ | |
86
+ v v
87
+ def predict(self, input_data:list, inference_parameters: Dict[str, str | float | int | bool]) -> list:
88
+ ...
89
+ # Predict with input data
90
+ outputs = self.model(input_data, **inference_parameters)
91
+ # Convert to Clarifai Output Type
92
+ return [TextOutput(each) for each in outputs]
93
+ |
94
+ |
95
+ |
96
+ v
97
+ Outputs are handled by the module -> platform backend to delivery back to user
98
+ ```
99
+
100
+ For testing the implementation, it's recommended to execute pytest test.py or directly call the predict method of a ModelInference instance.
101
+
102
+ ## clarifai_config.yaml
103
+
104
+ `yaml` file for essential configs
105
+
106
+ ```yaml
107
+ clarifai_model:
108
+ clarifai_model_id:
109
+ clarifai_user_app_id:
110
+ description:
111
+ inference_parameters: (*)
112
+ labels: (*)
113
+ type: (**)
114
+ serving_backend:
115
+ triton: (***)
116
+ max_batch_size:
117
+ image_shape:
118
+ ```
119
+
120
+ Explanation:
121
+
122
+ `clarifai_model`: configs for building/testing/uploading process
123
+
124
+ * `clarifai_model_id` (str, optional): Model ID on the platform.
125
+ * `clarifai_user_app_id` (str, optional): User ID and App ID on the platform separated by `/` for example `user_1/app_1`.
126
+ * `description` (str, optional): Model description.
127
+ > These 3 attributes are used to upload model. If not provided, they can be passed in *upload* command.
128
+
129
+ * (*) `inference_parameters` (List[Dict], optional): inference parameters for your model prediction method. This attribute is used to *test* and *upload* if provided. Two ways to insert it:
130
+
131
+ * Manual: Follow this [doc](./inference_parameters.md)
132
+ * Semi Manual: in *test.py*, init BaseTest with dict of your desired parameters. Learn more about [test.py]()
133
+
134
+ * (*) `labels` (list): insert manually list of concept names ***required by*** these model types **visual-classifier**, **visual-detector**, **visual-segmenter** and **text-classifier**.
135
+
136
+ * (**) `type` (str): type of your model, generated when init working dir. ***MUST NOT MODIFY IT***
137
+
138
+ `serving_backend`: custom config for serving
139
+
140
+ * `triton`: (optional)
141
+
142
+ * `max_batch_size` (int): Maximum number of inputs will go to `predict`. The default value is 1. Since `predict` method receives a list of inputs, if your model supports batch inference, you can set it to a value greater than 1 to leverage high-performance computation on the GPU.
143
+
144
+ * `image_shape` (list): Applicable only for image input models. It is a list of the width and height of the input image. The default is [-1, -1], which means it accepts any size.
145
+ > These 2 attributes can be set when initialize using **clarifai create model** command.
146
+
147
+ ## test.py
148
+ The file is generated when initializing to test InferenceModel in inference.py.
149
+
150
+ This test offers two essential features to enhance the testing and validation process:
151
+
152
+ **1. Implementation Validation**
153
+
154
+ Prior to initiating the build or upload processes, users can leverage this feature to thoroughly validate their implementation. This ensures the correctness and readiness of the model for deployment.
155
+
156
+ The test involves the validation of custom configuration in clarifai_config.yaml:
157
+
158
+ * Confirming that labels are provided for concept-output models.
159
+ * Verifying the format of inference_parameters.
160
+
161
+ Additionally, it validates the InferenceModel implementation:
162
+
163
+ * Ensuring the model is loaded correctly.
164
+ * Testing predict with dummy inputs.
165
+
166
+ **2. Inference Parameter Management**
167
+
168
+ Users can conveniently add or update inference parameters directly in the clarifai_config.yaml file. Additionally, the system performs automatic validation during the inference, ensuring the accuracy and compatibility of these parameters with the model's requirements. The test ensures **you can only use defined inference parameters with appropriate value**
169
+
170
+ ### file structure
171
+
172
+ ```python
173
+ class CustomTest(unittest.TestCase):
174
+
175
+ def setUp(self) -> None:
176
+ your_infer_parameter = dict()
177
+ self.model = BaseTest(your_infer_parameter)
178
+
179
+ def test_default_cases(self):
180
+ self.model.test_with_default_inputs()
181
+
182
+ ```
183
+
184
+ Explanation:
185
+
186
+ * `your_infer_parameter = dict()`: define your inference parameters as dict with key is parameter name and value is default value of it. For example, define params for hf text-generation model:
187
+
188
+ ```python
189
+ your_infer_parameter = dict(top_p=0.95, temperature=1, return_text=False, prefix="test")
190
+ ```
191
+
192
+ * `self.model = BaseTest(your_infer_parameter)` Loads the implemented model, converts inference parameters to the *Clarifai inference parameters* format and saves them in `clarifai_config.yaml`. See more [doc](./inference_parameters.md)
193
+
194
+ * `def test_default_cases(self):` Test your model with dummy input. If these dummy input values fail your model, kindly remove or comment out this function
195
+
196
+ Define new test:
197
+
198
+ Create a function with 'test' prefix, see `pytest` document to understand how to make a test case.
199
+ Call predict by `self.model.predict([list of input data], inference_parameters)`. For instance:
200
+
201
+ * Text input:
202
+
203
+ ```python
204
+ def test_text_input(self):
205
+ text: list = ["Tell me about Clarifai", "How deploy model to Clarifai"]
206
+ outputs = self.model.predict(text, temperature=0.9) # In terms of inference parameters for the above example, it will PASS
207
+ outputs = self.model.predict(text, top_k=10) # And this one will FAIL since `top_k` param is not defined when init self.model
208
+
209
+ ```
210
+
211
+ * Image input:
212
+
213
+ ```python
214
+ def test_image(self):
215
+ image = cv2.imread("path/to/image")
216
+ image = image[:, :, ::-1] # convert to RGB
217
+ out = self.model.predict([image])
218
+ ```
219
+
220
+ * MultiModal input:
221
+
222
+ ```python
223
+ def test_image_and_text(self):
224
+ image = cv2.imread("path/to/image")
225
+ image = image[:, :, ::-1]
226
+ text = "this is text"
227
+ input = dict(text=text, image=image)
228
+ out = self.model.predict([input])
229
+ ```
@@ -0,0 +1,11 @@
1
+ ## Inference Execution Environments
2
+
3
+ Each model built for inference with triton requires certain dependencies & dependency versions be installed for successful inference execution.
4
+ An execution environment is created for each model to be deployed on Clarifai and all necessary dependencies as listed in the `requirements.txt` file are installed there.
5
+
6
+ ## Supported python and torch versions
7
+
8
+ Currently, models must use python 3.8 (any 3.8.x). Supported torch versions are 1.13.1, 2.0.1 and 2.1.1.
9
+ If your model depends on torch, torch must be listed in your requirements.txt file (even if it is
10
+ already a dependency of another package). An appropriate supported torch version will be selected
11
+ based on your requirements.txt.
@@ -0,0 +1,139 @@
1
+ ## Inference parameters
2
+
3
+ In order to send it to `inference_parameters` of `predict` in `inference.py`, you can define some parameters and they will be visible and adjustable on Clarifai model view.
4
+
5
+ This document helps you to understand the concept of inference parameters and how to add them to `clarifai_config.yaml`
6
+
7
+ ## Overview
8
+
9
+ Each parameter has 4 fields:
10
+
11
+ * `path` (str): name of your parameter, it must be valid as python variable
12
+ * `field_type` (int): the parameter data type is one of {1,2,21,3}, it means {boolean, string, encrypted_string, number} respectively. `Number` means `int` or `float`. `Encrypted_string` is a string that can be used to store your secrets, like an API key. The API will not return the values for this as plaintext.
13
+ * `default_value`: a default value of the parameter.
14
+ * `description` (str): short sentence describes what the parameter does
15
+
16
+ An example of 4 type parameters:
17
+
18
+ ```yaml
19
+ - path: boolean_var
20
+ default_value: true
21
+ field_type: 1
22
+ description: a boolean variable
23
+ - path: string_var
24
+ default_value: "a string"
25
+ field_type: 2
26
+ description: a string variable
27
+ - path: number_var
28
+ default_value: 1
29
+ field_type: 3
30
+ description: a number variable
31
+ - path: secret_string_var
32
+ default_value: "YOUR_SECRET"
33
+ field_type: 21
34
+ description: a string variable contains secret like API key
35
+ ```
36
+
37
+ ## Add them to the config file
38
+
39
+ For example with the 4 sample parameters above.
40
+
41
+ 1. Manually:
42
+ Insert them to field inference_parameters of the file, e.g.
43
+
44
+ ```yaml
45
+ clarifai_model:
46
+ clarifai_model_id: ''
47
+ clarifai_user_app_id: ''
48
+ description: ''
49
+ inference_parameters:
50
+ - path: boolean_var
51
+ default_value: true
52
+ field_type: 1
53
+ description: a boolean variable
54
+ - path: string_var
55
+ default_value: "a string"
56
+ field_type: 2
57
+ description: a string variable
58
+ - path: number_var
59
+ default_value: 1
60
+ field_type: 3
61
+ description: a number variable
62
+ - path: secret_string_var
63
+ default_value: "YOUR_SECRET"
64
+ field_type: 21
65
+ description: a string variable contains secret like API key
66
+ labels: []
67
+ type: text-to-image
68
+ serving_backend:
69
+ triton:
70
+ ...
71
+ ```
72
+
73
+ 2. Semi: If you have a large number of fields, adding them one by one with specific field types can be exhausting and error-prone.
74
+
75
+ To address this, you can define them as a dictionary, where the key is the path and the value is the default value. Then, inject them into `BaseTest()` in `test.py` within your model repository. For example, suppose your test.py looks like this:
76
+
77
+ ```python
78
+ class CustomTest(unittest.TestCase):
79
+
80
+ def setUp(self) -> None:
81
+ your_infer_parameter = dict()
82
+ self.model = BaseTest(your_infer_parameter)
83
+
84
+ def test_default_cases(self):
85
+ self.model.test_with_default_inputs()
86
+
87
+ ```
88
+
89
+ The `BaseTest` class takes inference parameters as a dict, then validates their values and finally saves them to the config file
90
+ With current samples, the file will turn to
91
+
92
+ ```python
93
+ class CustomTest(unittest.TestCase):
94
+
95
+ def setUp(self) -> None:
96
+ your_infer_parameter = dict(boolean_var=True, string_var="a string", number_var=1, float_number_var=0.1, _secret_string_var="YOUR_SECRET")
97
+ self.model = BaseTest(your_infer_parameter)
98
+
99
+ ...
100
+ ```
101
+
102
+ After run `test.py` with pytest. The config file looks like:
103
+
104
+ ```yaml
105
+ clarifai_model:
106
+ clarifai_model_id: ''
107
+ clarifai_user_app_id: ''
108
+ description: ''
109
+ inference_parameters:
110
+ - path: boolean_var
111
+ default_value: true
112
+ field_type: 1
113
+ description: boolean_var
114
+ - path: string_var
115
+ default_value: "a string"
116
+ field_type: 2
117
+ description: string_var
118
+ - path: number_var
119
+ default_value: 1
120
+ field_type: 3
121
+ description: number_var
122
+ - path: float_number_var
123
+ default_value: 0.1
124
+ field_type: 3
125
+ description: float_number_var
126
+ - path: _secret_string_var
127
+ default_value: "YOUR_SECRET"
128
+ field_type: 21
129
+ description: _secret_string_var
130
+ labels: []
131
+ type: text-to-image
132
+ serving_backend:
133
+ triton:
134
+ ...
135
+ ```
136
+
137
+ > [!Note]
138
+ > * `description` field is set as `path`
139
+ > * For `ENCRYPTED_STRING`, it must be defined with `"_" prefix`
@@ -0,0 +1,19 @@
1
+ Each model type requires different input and output types. The table below illustrates the relationship between supported models and their corresponding input and output types.
2
+
3
+ | Type | Input | Output |
4
+ |---------------------|-------------|----------------------|
5
+ | multimodal-embedder | image,text | EmbeddingOutput |
6
+ | text-classifier | text | ClassifierOutput |
7
+ | text-embedder | text | EmbeddingOutput |
8
+ | text-to-image | text | ImageOutput |
9
+ | text-to-text | text | TextOutput |
10
+ | visual-classifier | image | ClassifierOutput |
11
+ | visual-detector | image | VisualDetectorOutput |
12
+ | visual-embedder | image | EmbeddingOutput |
13
+ | visual-segmenter | image | MasksOutput |
14
+
15
+ Note:
16
+
17
+ * `image`: single image is RGB np.ndarray with shape of [W, H, 3]
18
+ * `text`: single text is a string in python
19
+ * `multimodal`: has more than one input types
@@ -0,0 +1,16 @@
1
+ # Copyright 2023 Clarifai, Inc.
2
+ # Licensed under the Apache License, Version 2.0 (the "License");
3
+ # you may not use this file except in compliance with the License.
4
+ # You may obtain a copy of the License at
5
+ #
6
+ # http://www.apache.org/licenses/LICENSE-2.0
7
+ #
8
+ # Unless required by applicable law or agreed to in writing, software
9
+ # distributed under the License is distributed on an "AS IS" BASIS,
10
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
11
+ # See the License for the specific language governing permissions and
12
+ # limitations under the License.
13
+ from .base import * # noqa
14
+ from .config import * # noqa
15
+ from .inference_parameter import InferParam, InferParamManager # noqa
16
+ from .output import * # noqa