clarifai-11.1.7rc4-py3-none-any.whl → clarifai-11.1.7rc6-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "11.1.7rc4"
+ __version__ = "11.1.7rc6"
clarifai/client/model.py CHANGED
@@ -802,13 +802,14 @@ class Model(Lister, BaseClient):
  elif args:
  inputs = args[0]
  if inputs and isinstance(inputs, Iterable):
- inputs_iter = iter(inputs)
+ inputs_iter = inputs
  try:
  peek = next(inputs_iter)
  except StopIteration:
  pass
  else:
- use_proto_call = isinstance(peek, resources_pb2.Input)
+ use_proto_call = (peek and isinstance(peek, list) and
+ isinstance(peek[0], resources_pb2.Input))
  # put back the peeked value
  if inputs_iter is inputs:
  inputs = itertools.chain([peek], inputs_iter)
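The model.py change above switches batch detection from iter()-wrapping to peeking the iterable directly and, when the argument was already an iterator, putting the peeked element back with itertools.chain. A minimal standalone sketch of that peek-and-restore pattern, with plain values standing in for resources_pb2.Input protos (the helper name is illustrative, not SDK code):

    import itertools

    def peek(iterable):
        """Return (first_item, restored_iterable) without losing the first item."""
        it = iter(iterable)
        try:
            first = next(it)
        except StopIteration:
            return None, iterable  # empty: nothing to restore
        # put back the peeked value, mirroring the itertools.chain call in the diff
        return first, itertools.chain([first], it)

    first, restored = peek(x for x in range(3))
    print(first, list(restored))  # 0 [0, 1, 2]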
clarifai/client/model_client.py CHANGED
@@ -6,6 +6,7 @@ from clarifai_grpc.grpc.api.status import status_code_pb2

  from clarifai.constants.model import MAX_MODEL_PREDICT_INPUTS
  from clarifai.errors import UserError
+ from clarifai.runners.utils.data_utils import is_openai_chat_format
  from clarifai.runners.utils.method_signatures import (CompatibilitySerializer, deserialize,
  get_stream_from_signature, serialize,
  signatures_from_json)
@@ -129,7 +130,8 @@ class ModelClient:
  batch_inputs = args[0]
  # Validate each input is a dictionary
  is_batch_input_valid = all(isinstance(input, dict) for input in batch_inputs)
- if is_batch_input_valid:
+ if is_batch_input_valid and (not is_openai_chat_format(batch_inputs)):
+ # If the batch input is valid, call the function with the batch inputs and the method name
  return call_func(batch_inputs, method_name)

  for name, arg in zip(method_argnames, args): # handle positional with zip shortest
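The added not is_openai_chat_format(batch_inputs) guard matters because OpenAI-style chat messages are themselves a list of dicts, so without it they would be mistaken for a batch of per-input keyword dicts. A hedged illustration of the two shapes the check separates (the parameter name is made up; only the shapes matter):

    # A batch of inputs: each dict maps argument names to values, so batch dispatch applies.
    batch_inputs = [{"prompt": "hello"}, {"prompt": "goodbye"}]  # is_openai_chat_format -> False

    # Chat messages: also a list of dicts, but keyed by "role"/"content",
    # so is_openai_chat_format returns True and the batch path is skipped.
    chat_messages = [
        {"role": "system", "content": "You are a helpful assistant."},
        {"role": "user", "content": "Hi there"},
    ]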
@@ -293,7 +295,8 @@ class ModelClient:
  outputs.append(deserialize(output.data, output_signature, is_output=True))
  if batch_input:
  yield outputs
- yield outputs[0]
+ else:
+ yield outputs[0]

  def _generate_by_proto(
  self,
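The new else: makes the generate path yield either the whole batch or just the single output, where previously outputs[0] was yielded unconditionally after the batch. A tiny sketch of the corrected control flow (illustrative only, not SDK code):

    def emit(outputs, batch_input):
        # Mirrors the fixed branch: batch callers get the list, single callers one item.
        if batch_input:
            yield outputs
        else:
            yield outputs[0]

    print(list(emit(["a", "b"], batch_input=True)))  # [['a', 'b']]
    print(list(emit(["a"], batch_input=False)))      # ['a']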
@@ -421,8 +424,9 @@ class ModelClient:
  else:
  req.inputs.append(inputs)
  # TODO: put into new proto field?
- for inp in req.inputs:
- inp.data.metadata['_method_name'] = method_name
+ if method_name:
+ for inp in req.inputs:
+ inp.data.metadata['_method_name'] = method_name
  yield req

  def _stream_by_proto(self,
clarifai/runners/models/model_builder.py CHANGED
@@ -68,11 +68,11 @@ class ModelBuilder:
  self.inference_compute_info = self._get_inference_compute_info()
  self.is_v3 = True # Do model build for v3

- def create_model_instance(self, load_model=True):
+ def create_model_instance(self, load_model=True, mocking=False):
  """
  Create an instance of the model class, as specified in the config file.
  """
- model_class = self.load_model_class()
+ model_class = self.load_model_class(mocking=mocking)

  # initialize the model
  model = model_class()
@@ -80,7 +80,7 @@ class ModelBuilder:
  model.load_model()
  return model

- def load_model_class(self):
+ def load_model_class(self, mocking=False):
  """
  Import the model class from the model.py file, dynamically handling missing dependencies
  """
@@ -109,8 +109,9 @@ class ModelBuilder:
  # Mock all third-party imports to avoid ImportErrors or other issues
  return MagicMock()

- # Replace the built-in __import__ function with our custom one
- builtins.__import__ = custom_import
+ if mocking:
+ # Replace the built-in __import__ function with our custom one
+ builtins.__import__ = custom_import

  try:
  spec.loader.exec_module(module)
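Gating the builtins.__import__ override behind mocking=True means the import hook is only swapped when the builder just needs signatures rather than a working model. A simplified sketch of the underlying technique, assuming a hypothetical model.py path and falling back to MagicMock only when a real import fails (the actual builder mocks third-party imports more aggressively):

    import builtins
    import importlib.util
    from unittest.mock import MagicMock

    original_import = builtins.__import__

    def custom_import(name, *args, **kwargs):
        try:
            return original_import(name, *args, **kwargs)
        except ImportError:
            return MagicMock()  # missing third-party dependency becomes a harmless mock

    spec = importlib.util.spec_from_file_location("user_model", "model.py")  # hypothetical path
    module = importlib.util.module_from_spec(spec)
    builtins.__import__ = custom_import
    try:
        spec.loader.exec_module(module)  # imports of absent packages now resolve to mocks
    finally:
        builtins.__import__ = original_import  # always restore the real import hook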
@@ -306,7 +307,7 @@ class ModelBuilder:
  """
  Returns the method signatures for the model class in YAML format.
  """
- model_class = self.load_model_class()
+ model_class = self.load_model_class(mocking=True)
  method_info = model_class._get_method_info()
  signatures = {method.name: method.signature for method in method_info.values()}
  return signatures_to_yaml(signatures)
@@ -315,7 +316,7 @@ class ModelBuilder:
  """
  Returns the method signatures for the model class.
  """
- model_class = self.load_model_class()
+ model_class = self.load_model_class(mocking=True)
  method_info = model_class._get_method_info()
  signatures = [method.signature for method in method_info.values()]
  return signatures
clarifai/runners/models/model_class.py CHANGED
@@ -163,7 +163,7 @@ class ModelClass(ABC):
  request = next(request_iterator) # get first request to determine method
  assert len(request.inputs) == 1, "Streaming requires exactly one input"

- method_name = 'generate'
+ method_name = 'stream'
  inference_params = get_inference_params(request)
  if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
  method_name = request.inputs[0].data.metadata['_method_name']
@@ -224,7 +224,15 @@ class ModelClass(ABC):
  for k, v in kwargs.items():
  if k not in python_param_types:
  continue
- kwargs[k] = data_types.cast(v, python_param_types[k])
+
+ if hasattr(python_param_types[k], "__args__") and getattr(
+ python_param_types[k], "__origin__", None) == data_types.Stream:
+ # get the type of the items in the stream
+ stream_type = python_param_types[k].__args__[0]
+
+ kwargs[k] = data_types.cast(v, stream_type)
+ else:
+ kwargs[k] = data_types.cast(v, python_param_types[k])
  result.append(kwargs)
  return result

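The new branch inspects the annotation's __origin__/__args__ so that a parameter annotated as Stream[T] has its values cast element-wise to T instead of to the container type. The same introspection on a standard typing generic, using List[int] purely as a stand-in for the SDK's Stream type:

    from typing import List, get_args, get_origin

    annotation = List[int]

    # Equivalent in spirit to the hasattr("__args__") / __origin__ checks in the diff:
    if get_origin(annotation) is list and get_args(annotation):
        item_type = get_args(annotation)[0]
        print(item_type)  # <class 'int'> -- the per-item type to cast each element to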
clarifai/runners/models/model_run_locally.py CHANGED
@@ -7,6 +7,7 @@ import subprocess
  import sys
  import tempfile
  import time
+ import traceback
  import venv

  from clarifai_grpc.grpc.api import resources_pb2, service_pb2
@@ -114,7 +115,12 @@ class ModelRunLocally:
  model = self.builder.create_model_instance()
  # call its test method, if it has one
  if hasattr(model, "test"):
- model.test()
+ try:
+ model.test()
+ logger.info("Model tested successfully!")
+ except Exception as e:
+ logger.error(f"Error occurred while testing the model: {e}")
+ traceback.print_exc()

  def test_model(self):
  """Test the model by running it locally in the virtual environment."""
clarifai/runners/models/model_runner.py CHANGED
@@ -114,6 +114,9 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
  ensure_urls_downloaded(request)

  for resp in self.model.generate_wrapper(request):
+ if resp.status.code != status_code_pb2.SUCCESS:
+ yield service_pb2.RunnerItemOutput(multi_output_response=resp)
+ continue
  successes = []
  for output in resp.outputs:
  if not output.HasField('status') or not output.status.code:
@@ -142,6 +145,9 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
  ) -> Iterator[service_pb2.RunnerItemOutput]:
  # Call the generate() method the underlying model implements.
  for resp in self.model.stream_wrapper(pmo_iterator(runner_item_iterator)):
+ if resp.status.code != status_code_pb2.SUCCESS:
+ yield service_pb2.RunnerItemOutput(multi_output_response=resp)
+ continue
  successes = []
  for output in resp.outputs:
  if not output.HasField('status') or not output.status.code:
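Both runner wrappers now short-circuit on a non-SUCCESS response: the failed MultiOutputResponse is forwarded to the caller as-is and the per-output bookkeeping is skipped. The control flow reduced to plain Python (the status constant and dict payloads are stand-ins for the protobuf messages):

    SUCCESS = 10000  # stand-in for status_code_pb2.SUCCESS

    def forward(responses):
        for resp in responses:
            if resp["status"] != SUCCESS:
                # propagate the failure and skip per-output processing
                yield {"multi_output_response": resp}
                continue
            # normal path: inspect resp["outputs"], collect successes, then yield
            yield {"multi_output_response": resp}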
clarifai/runners/utils/data_utils.py CHANGED
@@ -13,3 +13,39 @@ def image_to_bytes(img: Image.Image, format="JPEG") -> bytes:
  def bytes_to_image(bytes_img) -> Image.Image:
  img = Image.open(BytesIO(bytes_img))
  return img
+
+
+ def is_openai_chat_format(messages):
+ """
+ Verify if the given argument follows the OpenAI chat messages format.
+
+ Args:
+ messages (list): A list of dictionaries representing chat messages.
+
+ Returns:
+ bool: True if valid, False otherwise.
+ """
+ if not isinstance(messages, list):
+ return False
+
+ valid_roles = {"system", "user", "assistant", "function"}
+
+ for msg in messages:
+ if not isinstance(msg, dict):
+ return False
+ if "role" not in msg or "content" not in msg:
+ return False
+ if msg["role"] not in valid_roles:
+ return False
+
+ content = msg["content"]
+
+ # Content should be either a string (text message) or a multimodal list
+ if isinstance(content, str):
+ continue # Valid text message
+
+ elif isinstance(content, list):
+ for item in content:
+ if not isinstance(item, dict):
+ return False
+ return True
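Since is_openai_chat_format is the helper newly imported by ModelClient above, a quick usage check against the function exactly as added in this diff:

    from clarifai.runners.utils.data_utils import is_openai_chat_format

    chat = [
        {"role": "system", "content": "You are terse."},
        {"role": "user", "content": [{"type": "text", "text": "hi"}]},  # multimodal list content
    ]
    kwargs_batch = [{"prompt": "hello"}]  # plain keyword dict, not a chat message

    print(is_openai_chat_format(chat))          # True
    print(is_openai_chat_format(kwargs_batch))  # False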
clarifai/runners/utils/method_signatures.py CHANGED
@@ -1,7 +1,7 @@
  import inspect
  import json
  from collections import namedtuple
- from typing import List, Tuple, get_args, get_origin
+ from typing import Dict, List, Tuple, get_args, get_origin

  import numpy as np
  import PIL.Image
@@ -327,13 +327,6 @@ def _normalize_type(tp):


  def _normalize_data_type(tp):
-
- # jsonable list and dict, these can be serialized as json
- # (tuple we want to keep as a tuple for args and returns, so don't include here)
- if tp in (list, dict) or (get_origin(tp) in (list, dict) and _is_jsonable(tp) and
- get_args(tp) is None):
- return data_types.JSON
-
  # container types that need to be serialized as parts
  if get_origin(tp) == list and get_args(tp):
  return List[_normalize_data_type(get_args(tp)[0])]
@@ -371,6 +364,11 @@ def _normalize_data_type(tp):
  if tp == PIL.Image:
  raise TypeError('Use PIL.Image.Image instead of PIL.Image module')

+ # jsonable list and dict, these can be serialized as json
+ # (tuple we want to keep as a tuple for args and returns, so don't include here)
+ if tp in (list, dict, Dict) or (get_origin(tp) in (list, dict, Dict) and _is_jsonable(tp)):
+ return data_types.JSON
+
  # check for known data types
  try:
  if tp in _DATA_TYPES:
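Moving the jsonable list/dict check below the parameterized-container handling (and adding typing.Dict) keeps List[...] annotations expanding into typed parts while bare list, dict, or Dict[...] annotations normalize to data_types.JSON. The typing introspection the check relies on behaves like this (standard library only):

    from typing import Dict, List, get_args, get_origin

    print(get_origin(Dict[str, int]), get_args(Dict[str, int]))  # <class 'dict'> (str, int)
    print(get_origin(List[float]), get_args(List[float]))        # <class 'list'> (float,)
    print(get_origin(dict), get_args(dict))                      # None ()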
clarifai-11.1.7rc4.dist-info/METADATA → clarifai-11.1.7rc6.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: clarifai
- Version: 11.1.7rc4
+ Version: 11.1.7rc6
  Summary: Clarifai Python SDK
  Home-page: https://github.com/Clarifai/clarifai-python
  Author: Clarifai
@@ -20,7 +20,7 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: clarifai-grpc>=11.2.4
+ Requires-Dist: clarifai-grpc>=11.2.6
  Requires-Dist: clarifai-protocol>=0.0.20
  Requires-Dist: numpy>=1.22.0
  Requires-Dist: tqdm>=4.65.0
@@ -35,6 +35,17 @@ Requires-Dist: requests>=2.32.3
  Requires-Dist: aiohttp>=3.10.0
  Provides-Extra: all
  Requires-Dist: pycocotools==2.0.6; extra == "all"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary

  <h1 align="center">
  <a href="https://www.clarifai.com/"><img alt="Clarifai" title="Clarifai" src="https://github.com/user-attachments/assets/623b883b-7fe5-4b95-bbfa-8691f5779af4"></a>
clarifai-11.1.7rc4.dist-info/RECORD → clarifai-11.1.7rc6.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- clarifai/__init__.py,sha256=ifkdBMaRvYCzXalTvOKcl2DNqYnmG2YMz8JBP1t1moU,26
+ clarifai/__init__.py,sha256=PzbKrpCo-t2qaebsvu-symHcih514RB4J2tA_qrxQqQ,26
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
@@ -30,8 +30,8 @@ clarifai/client/dataset.py,sha256=y3zKT_VhP1gyN3OO-b3cPeW21ZXyKbQ7ZJkEG06bsTU,32
  clarifai/client/deployment.py,sha256=w7Y6pA1rYG4KRK1SwusRZc2sQRXlG8wezuVdzSWpCo0,2586
  clarifai/client/input.py,sha256=obMAHMDU1OwfXZ8KraOnGFlWzlW-3F7Ob_2lcOQMlhY,46339
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
- clarifai/client/model.py,sha256=DFlZLIExMUvYXc9hDzVLYka6_AbfG3vqLlhZkk4GfIY,76835
- clarifai/client/model_client.py,sha256=FwI9XEDZjyrubK3ue0-gyTKwK_lShQcm8SoFc66aXiw,17537
+ clarifai/client/model.py,sha256=UGBC0weDFCH7OK7vf67yiVmQ09hlRx_9ZkfKzQjLq-U,76896
+ clarifai/client/model_client.py,sha256=nKEMH0Rkb1cSVpuIwmMI4Kqf1zAXT6eEPaDoAunHEb4,17796
  clarifai/client/module.py,sha256=FTkm8s9m-EaTKN7g9MnLhGJ9eETUfKG7aWZ3o1RshYs,4204
  clarifai/client/nodepool.py,sha256=la3vTFrO4LX8zm2eQ5jqf2L0-kQ63Dano8FibadoZbk,10152
  clarifai/client/search.py,sha256=GaPWN6JmTQGZaCHr6U1yv0zqR6wKFl7i9IVLg2ul1CI,14254
@@ -138,11 +138,11 @@ clarifai/runners/dockerfile_template/Dockerfile.nim,sha256=CSdUAehj3uOwminioLnT5
  clarifai/runners/dockerfile_template/Dockerfile.template,sha256=5cjv7U8PmWa3DB_5B1CqSYh_6GE0E0np52TIAa7EIDE,2312
  clarifai/runners/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/models/base_typed_model.py,sha256=0QCWxch8CcyJSKvE1D4PILd2RSnQZHTmx4DXlQQ6dpo,7856
- clarifai/runners/models/model_builder.py,sha256=GlMpJn1krpckvI0cqd8ujrCnoU62z-kqSI3nJfGJEwY,34788
- clarifai/runners/models/model_class.py,sha256=qK5qk4geoFpqrRtl4VmKK90CIJKyWmD_vjJMqLWR_CQ,11854
+ clarifai/runners/models/model_builder.py,sha256=-GFmVqDr6Dw9zRyiFH2vRaJF2EAVTmj1OfaZPzhzfw8,34877
+ clarifai/runners/models/model_class.py,sha256=R32Nxxe-Ols4nyMq8KWer9jqKn2rDyBHr5ZQGDAhJxk,12178
  clarifai/runners/models/model_class_refract.py,sha256=HxuozxSW7ag5yWCPxjNwgLArQ6dORhyGXlnpPaZz2-c,3211
- clarifai/runners/models/model_run_locally.py,sha256=VZetm9Mko8MBjcjwr6PCnTU9gF3glgD5qvpbj-8tW2s,17962
- clarifai/runners/models/model_runner.py,sha256=qyc73pe4xc9BsUKHwnOyC9g-RNCARiFis4GTh-yg0vg,6219
+ clarifai/runners/models/model_run_locally.py,sha256=m5uLlXpWVGuQIJv-XBt-sHGx1gD8w6d77f9b85DOZqs,18170
+ clarifai/runners/models/model_runner.py,sha256=T4Qn_x0vky7XdeS54bvipzEmKZMra1tQdAu_u01yyjc,6503
  clarifai/runners/models/model_servicer.py,sha256=A--b1P71PBCAMJCpy_-fpNDkfCVdvdMh1LleW15dSas,3037
  clarifai/runners/models/model_upload.py,sha256=VjJgNNBPP9O7LkNCXxOqa0lTW1M7k6XKVyI6XlLdXIc,25095
  clarifai/runners/models/temp.py,sha256=0Xy2tAqA5eu2axuTanac995t5IQrnU1y8MdNm-7we6Q,404
@@ -160,10 +160,10 @@ clarifai/runners/utils/const.py,sha256=bwj-Pcw558-pasdIFbNhnkn-9oiCdojYH1fNTTUG2
  clarifai/runners/utils/data_handler.py,sha256=b7k6MWYPXSgjrfw6wsDf82xFYa0D7UjYmjE4mw5HzHM,8499
  clarifai/runners/utils/data_handler_refract.py,sha256=3M-V4hkOoF-9Ix4hE6ocXWiTJPc9dewtu6FMtddd-jQ,6343
  clarifai/runners/utils/data_types.py,sha256=A6IYU55pdPFfoh0K6HkEgTPlgQVv2JUG5lOlqTu1w44,12258
- clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
+ clarifai/runners/utils/data_utils.py,sha256=g0YEb0j_F6azIKb6BqR-uhWgR0qbmpdL1-Z4KS4mvgU,1228
  clarifai/runners/utils/loader.py,sha256=SgNHMwRmCCymFQm8aDp73NmIUHhM-N60CBlTKbPzmVc,7470
  clarifai/runners/utils/logger.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- clarifai/runners/utils/method_signatures.py,sha256=hwrZnSbnt4OSGqnrN7INghtuJm8EJm0CEnTpwlsGgmw,17468
+ clarifai/runners/utils/method_signatures.py,sha256=RiAbj7I8JGp0pqwqWZN9AgP1GWYAzizK8LdSeSN_hHE,17432
  clarifai/runners/utils/serializers.py,sha256=S4sRsOVvH191vAGTRTAAdwLlQwlK4T5QVRDGPptg9nQ,7191
  clarifai/runners/utils/url_fetcher.py,sha256=v_8JOWmkyFAzsBulsieKX7Nfjy1Yg7wGSZeqfEvw2cg,1640
  clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc,sha256=0GGbXIecXlOZmQKMCkSRhEBY_a1zvoimv-mHG4pJuNA,167
@@ -229,9 +229,9 @@ clarifai/workflows/__pycache__/__init__.cpython-39.pyc,sha256=9nA--jULSW7OFrYOcs
  clarifai/workflows/__pycache__/export.cpython-310.pyc,sha256=phEGwi2gAojCUhRTqjZVeTDn7Gk6LCVBeSTjAj4m9iY,2418
  clarifai/workflows/__pycache__/utils.cpython-310.pyc,sha256=M9_KTM7GOOS5SPrWwAzqHDqyGvgKi3xuSGvyw6MNf-I,1925
  clarifai/workflows/__pycache__/validate.cpython-310.pyc,sha256=c18Jgp_-CAm8RD_tmUpDCPoqZeexaoWELG0yBzb9rjw,2149
- clarifai-11.1.7rc4.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
- clarifai-11.1.7rc4.dist-info/METADATA,sha256=kufmH0KOUsaV0LTIz5ckGf8oF4zfCr5giQbrsAucNRo,22215
- clarifai-11.1.7rc4.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- clarifai-11.1.7rc4.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
- clarifai-11.1.7rc4.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
- clarifai-11.1.7rc4.dist-info/RECORD,,
+ clarifai-11.1.7rc6.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+ clarifai-11.1.7rc6.dist-info/METADATA,sha256=9IsUiXR999olaPeOHXDj7yVzwdc5LjoNfMxNCbIGD1c,22453
+ clarifai-11.1.7rc6.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ clarifai-11.1.7rc6.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
+ clarifai-11.1.7rc6.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+ clarifai-11.1.7rc6.dist-info/RECORD,,
clarifai-11.1.7rc4.dist-info/WHEEL → clarifai-11.1.7rc6.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.1.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any
