clarifai 11.1.7rc5__py3-none-any.whl → 11.1.7rc7__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "11.1.7rc5"
+ __version__ = "11.1.7rc7"
clarifai/client/model.py CHANGED
@@ -84,6 +84,7 @@ class Model(Lister, BaseClient):
      compute_cluster_id=compute_cluster_id,
      nodepool_id=nodepool_id,
      deployment_id=deployment_id,
+     user_id=self.user_id,  # FIXME the deployment's user_id can be different than the model's.
  )
  BaseClient.__init__(
      self,
@@ -802,13 +803,14 @@ class Model(Lister, BaseClient):
  elif args:
    inputs = args[0]
    if inputs and isinstance(inputs, Iterable):
-     inputs_iter = iter(inputs)
+     inputs_iter = inputs
      try:
        peek = next(inputs_iter)
      except StopIteration:
        pass
      else:
-       use_proto_call = isinstance(peek, resources_pb2.Input)
+       use_proto_call = (peek and isinstance(peek, list) and
+                         isinstance(peek[0], resources_pb2.Input))
      # put back the peeked value
      if inputs_iter is inputs:
        inputs = itertools.chain([peek], inputs_iter)
clarifai/client/model_client.py CHANGED
@@ -6,6 +6,7 @@ from clarifai_grpc.grpc.api.status import status_code_pb2

  from clarifai.constants.model import MAX_MODEL_PREDICT_INPUTS
  from clarifai.errors import UserError
+ from clarifai.runners.utils.data_utils import is_openai_chat_format
  from clarifai.runners.utils.method_signatures import (CompatibilitySerializer, deserialize,
                                                         get_stream_from_signature, serialize,
                                                         signatures_from_json)
@@ -129,7 +130,8 @@ class ModelClient:
  batch_inputs = args[0]
  # Validate each input is a dictionary
  is_batch_input_valid = all(isinstance(input, dict) for input in batch_inputs)
- if is_batch_input_valid:
+ if is_batch_input_valid and (not is_openai_chat_format(batch_inputs)):
+   # If the batch input is valid, call the function with the batch inputs and the method name
    return call_func(batch_inputs, method_name)

  for name, arg in zip(method_argnames, args):  # handle positional with zip shortest
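(For orientation only: a rough sketch of the distinction this new check draws, using the is_openai_chat_format helper added to data_utils.py further below; the example inputs are hypothetical.)

    from clarifai.runners.utils.data_utils import is_openai_chat_format

    # OpenAI-style chat messages: no longer unpacked as a batch of per-call kwargs.
    chat = [{"role": "user", "content": "Hello"}]
    # A batch of per-call keyword dicts: still dispatched through the batch path.
    batch = [{"prompt": "Hello"}, {"prompt": "World"}]

    assert is_openai_chat_format(chat)
    assert not is_openai_chat_format(batch)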
@@ -201,6 +203,8 @@ class ModelClient:
  proto = resources_pb2.Input()

  serialize(input, input_signature, proto.data)
+ print("input:", input)
+ print("proto.data:", proto.data)
  proto_inputs.append(proto)

  response = self._predict_by_proto(proto_inputs, method_name)
@@ -293,7 +297,8 @@ class ModelClient:
  outputs.append(deserialize(output.data, output_signature, is_output=True))
  if batch_input:
    yield outputs
- yield outputs[0]
+ else:
+   yield outputs[0]

  def _generate_by_proto(
      self,
@@ -421,8 +426,9 @@ class ModelClient:
  else:
    req.inputs.append(inputs)
  # TODO: put into new proto field?
- for inp in req.inputs:
-   inp.data.metadata['_method_name'] = method_name
+ if method_name:
+   for inp in req.inputs:
+     inp.data.metadata['_method_name'] = method_name
  yield req

  def _stream_by_proto(self,
clarifai/runners/models/model_builder.py CHANGED
@@ -108,7 +108,7 @@ class ModelBuilder:

  # Mock all third-party imports to avoid ImportErrors or other issues
  return MagicMock()
-
+
  if mocking:
    # Replace the built-in __import__ function with our custom one
    builtins.__import__ = custom_import
clarifai/runners/models/model_class.py CHANGED
@@ -163,7 +163,7 @@ class ModelClass(ABC):
  request = next(request_iterator)  # get first request to determine method
  assert len(request.inputs) == 1, "Streaming requires exactly one input"

- method_name = 'generate'
+ method_name = 'stream'
  inference_params = get_inference_params(request)
  if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
    method_name = request.inputs[0].data.metadata['_method_name']
@@ -224,7 +224,15 @@ class ModelClass(ABC):
  for k, v in kwargs.items():
    if k not in python_param_types:
      continue
-   kwargs[k] = data_types.cast(v, python_param_types[k])
+
+   if hasattr(python_param_types[k], "__args__") and getattr(
+       python_param_types[k], "__origin__", None) == data_types.Stream:
+     # get the type of the items in the stream
+     stream_type = python_param_types[k].__args__[0]
+
+     kwargs[k] = data_types.cast(v, stream_type)
+   else:
+     kwargs[k] = data_types.cast(v, python_param_types[k])
  result.append(kwargs)
  return result
clarifai/runners/models/model_run_locally.py CHANGED
@@ -7,6 +7,7 @@ import subprocess
  import sys
  import tempfile
  import time
+ import traceback
  import venv

  from clarifai_grpc.grpc.api import resources_pb2, service_pb2
@@ -114,7 +115,12 @@ class ModelRunLocally:
  model = self.builder.create_model_instance()
  # call its test method, if it has one
  if hasattr(model, "test"):
-   model.test()
+   try:
+     model.test()
+     logger.info("Model tested successfully!")
+   except Exception as e:
+     logger.error(f"Error occurred while testing the model: {e}")
+     traceback.print_exc()

  def test_model(self):
    """Test the model by running it locally in the virtual environment."""
clarifai/runners/models/model_runner.py CHANGED
@@ -114,6 +114,9 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
  ensure_urls_downloaded(request)

  for resp in self.model.generate_wrapper(request):
+   if resp.status.code != status_code_pb2.SUCCESS:
+     yield service_pb2.RunnerItemOutput(multi_output_response=resp)
+     continue
    successes = []
    for output in resp.outputs:
      if not output.HasField('status') or not output.status.code:
@@ -142,6 +145,9 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
  ) -> Iterator[service_pb2.RunnerItemOutput]:
  # Call the generate() method the underlying model implements.
  for resp in self.model.stream_wrapper(pmo_iterator(runner_item_iterator)):
+   if resp.status.code != status_code_pb2.SUCCESS:
+     yield service_pb2.RunnerItemOutput(multi_output_response=resp)
+     continue
    successes = []
    for output in resp.outputs:
      if not output.HasField('status') or not output.status.code:
clarifai/runners/utils/data_types.py CHANGED
@@ -158,19 +158,20 @@ class Text(MessageData):

  class Concept(MessageData):

-   def __init__(self, name: str, value: float = 0):
+   def __init__(self, id: str, name: str, value: float = 1):
+     self.id = id
      self.name = name
      self.value = value

    def __repr__(self) -> str:
-     return f"Concept(name={self.name!r}, value={self.value})"
+     return f"Concept(id={self.id!r}, name={self.name!r}, value={self.value})"

    def to_proto(self):
-     return ConceptProto(name=self.name, value=self.value)
+     return ConceptProto(id=self.id, name=self.name, value=self.value)

    @classmethod
    def from_proto(cls, proto: ConceptProto) -> "Concept":
-     return cls(proto.name, proto.value)
+     return cls(proto.id, proto.name, proto.value)


  class Region(MessageData):
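(A minimal usage sketch of the updated Concept API; the id, name, and value shown are made up.)

    from clarifai.runners.utils.data_types import Concept

    c = Concept(id="dog", name="dog", value=0.97)  # id is now the first positional argument; value defaults to 1
    proto = c.to_proto()                           # the ConceptProto now carries id as well as name and value
    assert Concept.from_proto(proto).id == "dog"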
clarifai/runners/utils/data_utils.py CHANGED
@@ -1,7 +1,12 @@
  from io import BytesIO

+ from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeEnumOption
+ from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeField as InputFieldProto
+ from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeRangeInfo
  from PIL import Image

+ from clarifai.runners.utils.data_types import MessageData
+

  def image_to_bytes(img: Image.Image, format="JPEG") -> bytes:
    buffered = BytesIO()
@@ -13,3 +18,166 @@ def image_to_bytes(img: Image.Image, format="JPEG") -> bytes:
  def bytes_to_image(bytes_img) -> Image.Image:
    img = Image.open(BytesIO(bytes_img))
    return img
+
+
+ def is_openai_chat_format(messages):
+   """
+   Verify if the given argument follows the OpenAI chat messages format.
+
+   Args:
+     messages (list): A list of dictionaries representing chat messages.
+
+   Returns:
+     bool: True if valid, False otherwise.
+   """
+   if not isinstance(messages, list):
+     return False
+
+   valid_roles = {"system", "user", "assistant", "function"}
+
+   for msg in messages:
+     if not isinstance(msg, dict):
+       return False
+     if "role" not in msg or "content" not in msg:
+       return False
+     if msg["role"] not in valid_roles:
+       return False
+
+     content = msg["content"]
+
+     # Content should be either a string (text message) or a multimodal list
+     if isinstance(content, str):
+       continue  # Valid text message
+
+     elif isinstance(content, list):
+       for item in content:
+         if not isinstance(item, dict):
+           return False
+   return True
+
+
+ class InputField(MessageData):
+   """A field that can be used to store input data."""
+
+   def __init__(self,
+                default=None,
+                description=None,
+                min_value=None,
+                max_value=None,
+                choices=None,
+                visibility=True,
+                is_param=False):
+     self.default = default
+     self.description = description
+     self.min_value = min_value
+     self.max_value = max_value
+     self.choices = choices
+     self.visibility = visibility
+     self.is_param = is_param
+
+   def __repr__(self) -> str:
+     attrs = []
+     if self.default is not None:
+       attrs.append(f"default={self.default!r}")
+     if self.description is not None:
+       attrs.append(f"description={self.description!r}")
+     if self.min_value is not None:
+       attrs.append(f"min_value={self.min_value!r}")
+     if self.max_value is not None:
+       attrs.append(f"max_value={self.max_value!r}")
+     if self.choices is not None:
+       attrs.append(f"choices={self.choices!r}")
+     attrs.append(f"visibility={self.visibility!r}")
+     attrs.append(f"is_param={self.is_param!r}")
+     return f"InputField({', '.join(attrs)})"
+
+   def to_proto(self, proto=None) -> InputFieldProto:
+     if proto is None:
+       proto = InputFieldProto()
+     if self.description is not None:
+       proto.description = self.description
+
+     if self.choices is not None:
+       for choice in self.choices:
+         option = ModelTypeEnumOption(id=str(choice))
+         proto.model_type_enum_options.append(option)
+
+     proto.required = self.default is None
+
+     if self.min_value is not None or self.max_value is not None:
+       range_info = ModelTypeRangeInfo()
+       if self.min_value is not None:
+         range_info.min = float(self.min_value)
+       if self.max_value is not None:
+         range_info.max = float(self.max_value)
+       proto.model_type_range_info.CopyFrom(range_info)
+
+     proto.visibility = self.visibility
+     proto.is_param = self.is_param
+
+     if self.default is not None:
+       if isinstance(self.default, str) or isinstance(self.default, bool) or isinstance(
+           self.default, (int, float)):
+         proto.default = str(self.default)
+       else:
+         import json
+         proto.default = json.dumps(self.default)
+
+     return proto
+
+   @classmethod
+   def from_proto(cls, proto):
+     default = None
+     if proto.HasField('default'):
+       pb_value = proto.default
+       if pb_value.HasField('string_value'):
+         default = pb_value.string_value
+         try:
+           import json
+           default = json.loads(default)
+         except json.JSONDecodeError:
+           pass
+       elif pb_value.HasField('number_value'):
+         default = pb_value.number_value
+         if default.is_integer():
+           default = int(default)
+         else:
+           default = float(default)
+       elif pb_value.HasField('bool_value'):
+         default = pb_value.bool_value
+
+     choices = [option.id for option in proto.model_type_enum_options
+               ] if proto.model_type_enum_options else None
+
+     min_value = None
+     max_value = None
+     if proto.HasField('model_type_range_info'):
+       min_value = proto.model_type_range_info.min
+       max_value = proto.model_type_range_info.max
+       if min_value.is_integer():
+         min_value = int(min_value)
+       if max_value.is_integer():
+         max_value = int(max_value)
+
+     return cls(
+         default=default,
+         description=proto.description if proto.description else None,
+         min_value=min_value,
+         max_value=max_value,
+         choices=choices,
+         visibility=proto.visibility,
+         is_param=proto.is_param)
+
+   @classmethod
+   def set_default(cls, proto=None, default=None):
+
+     if proto is None:
+       proto = InputFieldProto()
+     if default is not None:
+       if isinstance(default, str) or isinstance(default, bool) or isinstance(
+           default, (int, float)):
+         proto.default = str(default)
+       else:
+         import json
+         proto.default = json.dumps(default)
+     return proto
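(A usage sketch for the new InputField: declared as a parameter default on a model method, it is what the build_variable_signature change in method_signatures.py below converts into the signature proto. The method name and values here are hypothetical.)

    from clarifai.runners.utils.data_utils import InputField

    # Hypothetical model method: each InputField default would be folded into the
    # corresponding ModelTypeField (description, range info, enum options, is_param).
    def generate(self,
                 prompt: str,
                 temperature: float = InputField(default=0.7, min_value=0.0, max_value=1.0,
                                                 description="Sampling temperature"),
                 style: str = InputField(default="formal", choices=["formal", "casual"],
                                         is_param=True)):
      ...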
clarifai/runners/utils/method_signatures.py CHANGED
@@ -1,7 +1,7 @@
  import inspect
  import json
  from collections import namedtuple
- from typing import List, Tuple, get_args, get_origin
+ from typing import Dict, List, Tuple, get_args, get_origin

  import numpy as np
  import PIL.Image
@@ -10,7 +10,7 @@ from clarifai_grpc.grpc.api import resources_pb2
  from google.protobuf.json_format import MessageToDict, ParseDict
  from google.protobuf.message import Message as MessageProto

- from clarifai.runners.utils import data_types
+ from clarifai.runners.utils import data_types, data_utils
  from clarifai.runners.utils.serializers import (
      AtomicFieldSerializer, JSONSerializer, ListSerializer, MessageSerializer,
      NamedFieldsSerializer, NDArraySerializer, Serializer, TupleSerializer)
@@ -107,7 +107,10 @@ def build_variable_signature(name, annotation, default=inspect.Parameter.empty,
  if not is_output:
    sig.required = (default is inspect.Parameter.empty)
    if not sig.required:
-     sig.default = str(default)
+     if isinstance(default, data_utils.InputField):
+       sig = default.to_proto(sig)
+     else:
+       sig = data_utils.InputField.set_default(sig, default)

  _fill_signature_type(sig, tp)

@@ -150,6 +153,7 @@ def _fill_signature_type(sig, tp):
  sig.type = resources_pb2.ModelTypeField.DataType.TUPLE
  for inner_type in args:
    inner_sig = sig.type_args.add()
+   inner_sig.name = sig.name + '_item'
    _fill_signature_type(inner_sig, inner_type)
  return

@@ -157,6 +161,7 @@ def _fill_signature_type(sig, tp):
  if origin == list:
    sig.type = resources_pb2.ModelTypeField.DataType.LIST
    inner_sig = sig.type_args.add()
+   inner_sig.name = sig.name + '_item'
    _fill_signature_type(inner_sig, args[0])
    return

@@ -237,7 +242,7 @@ def serialize(kwargs, signatures, proto=None, is_output=False):
  # if there is only one output, flatten it and return directly
  inline_first_value = True
  if signatures and signatures[0].type not in _NON_INLINABLE_TYPES:
-   inline_first_value = True
+   inline_first_value = False
  for sig_i, sig in enumerate(signatures):
    if sig.name not in kwargs:
      if sig.required:
@@ -327,13 +332,6 @@ def _normalize_type(tp):


  def _normalize_data_type(tp):
-
- # jsonable list and dict, these can be serialized as json
- # (tuple we want to keep as a tuple for args and returns, so don't include here)
- if tp in (list, dict) or (get_origin(tp) in (list, dict) and _is_jsonable(tp) and
-           get_args(tp) is None):
-   return data_types.JSON
-

  # container types that need to be serialized as parts
  if get_origin(tp) == list and get_args(tp):
@@ -371,6 +369,11 @@ def _normalize_data_type(tp):
  if tp == PIL.Image:
    raise TypeError('Use PIL.Image.Image instead of PIL.Image module')

+ # jsonable list and dict, these can be serialized as json
+ # (tuple we want to keep as a tuple for args and returns, so don't include here)
+ if tp in (list, dict, Dict) or (get_origin(tp) in (list, dict, Dict) and _is_jsonable(tp)):
+   return data_types.JSON
+
  # check for known data types
  try:
    if tp in _DATA_TYPES:
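(Illustrative only: with the relocated check, bare dict and typing.Dict annotations now normalize to the JSON data type, and the check runs after the typed-container handling earlier in the function rather than first; the method below is hypothetical.)

    from typing import Dict

    # Hypothetical signature: `tags: dict` maps straight to JSON; `metadata: Dict[str, str]`
    # maps to JSON as well, assuming _is_jsonable accepts the parameterized form.
    def predict(self, prompt: str, metadata: Dict[str, str], tags: dict):
      ...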
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.1
1
+ Metadata-Version: 2.2
2
2
  Name: clarifai
3
- Version: 11.1.7rc5
3
+ Version: 11.1.7rc7
4
4
  Summary: Clarifai Python SDK
5
5
  Home-page: https://github.com/Clarifai/clarifai-python
6
6
  Author: Clarifai
@@ -20,7 +20,7 @@ Classifier: Operating System :: OS Independent
20
20
  Requires-Python: >=3.8
21
21
  Description-Content-Type: text/markdown
22
22
  License-File: LICENSE
23
- Requires-Dist: clarifai-grpc>=11.2.4
23
+ Requires-Dist: clarifai-grpc>=11.2.6
24
24
  Requires-Dist: clarifai-protocol>=0.0.20
25
25
  Requires-Dist: numpy>=1.22.0
26
26
  Requires-Dist: tqdm>=4.65.0
@@ -35,6 +35,17 @@ Requires-Dist: requests>=2.32.3
35
35
  Requires-Dist: aiohttp>=3.10.0
36
36
  Provides-Extra: all
37
37
  Requires-Dist: pycocotools==2.0.6; extra == "all"
38
+ Dynamic: author
39
+ Dynamic: author-email
40
+ Dynamic: classifier
41
+ Dynamic: description
42
+ Dynamic: description-content-type
43
+ Dynamic: home-page
44
+ Dynamic: license
45
+ Dynamic: provides-extra
46
+ Dynamic: requires-dist
47
+ Dynamic: requires-python
48
+ Dynamic: summary
38
49
 
39
50
  <h1 align="center">
40
51
  <a href="https://www.clarifai.com/"><img alt="Clarifai" title="Clarifai" src="https://github.com/user-attachments/assets/623b883b-7fe5-4b95-bbfa-8691f5779af4"></a>
clarifai-11.1.7rc5.dist-info/RECORD → clarifai-11.1.7rc7.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- clarifai/__init__.py,sha256=L7ecgzwVF1qIM4sX3JzNrp3tI1YSfegldgbqVv-b-HQ,26
+ clarifai/__init__.py,sha256=UVb-Qd94W3trcR1tUhxvZWOZsQeO3zRk4WOaKuSTtK0,26
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
@@ -30,8 +30,8 @@ clarifai/client/dataset.py,sha256=y3zKT_VhP1gyN3OO-b3cPeW21ZXyKbQ7ZJkEG06bsTU,32
  clarifai/client/deployment.py,sha256=w7Y6pA1rYG4KRK1SwusRZc2sQRXlG8wezuVdzSWpCo0,2586
  clarifai/client/input.py,sha256=obMAHMDU1OwfXZ8KraOnGFlWzlW-3F7Ob_2lcOQMlhY,46339
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
- clarifai/client/model.py,sha256=DFlZLIExMUvYXc9hDzVLYka6_AbfG3vqLlhZkk4GfIY,76835
- clarifai/client/model_client.py,sha256=FwI9XEDZjyrubK3ue0-gyTKwK_lShQcm8SoFc66aXiw,17537
+ clarifai/client/model.py,sha256=HLTzCoGhZ5Ifm5x5nSFa4YULnLLlBpZF-29nfOcwFuY,76995
+ clarifai/client/model_client.py,sha256=B2M_yr-iuhgT1bJTontfN_KrJyfDRKfNW5ALM0Cufkc,17864
  clarifai/client/module.py,sha256=FTkm8s9m-EaTKN7g9MnLhGJ9eETUfKG7aWZ3o1RshYs,4204
  clarifai/client/nodepool.py,sha256=la3vTFrO4LX8zm2eQ5jqf2L0-kQ63Dano8FibadoZbk,10152
  clarifai/client/search.py,sha256=GaPWN6JmTQGZaCHr6U1yv0zqR6wKFl7i9IVLg2ul1CI,14254
@@ -138,11 +138,11 @@ clarifai/runners/dockerfile_template/Dockerfile.nim,sha256=CSdUAehj3uOwminioLnT5
  clarifai/runners/dockerfile_template/Dockerfile.template,sha256=5cjv7U8PmWa3DB_5B1CqSYh_6GE0E0np52TIAa7EIDE,2312
  clarifai/runners/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/models/base_typed_model.py,sha256=0QCWxch8CcyJSKvE1D4PILd2RSnQZHTmx4DXlQQ6dpo,7856
- clarifai/runners/models/model_builder.py,sha256=DDE4s1yzomsGyl4PbpjKdiofM6VhL0sgZujCP6wvF2k,34881
- clarifai/runners/models/model_class.py,sha256=qK5qk4geoFpqrRtl4VmKK90CIJKyWmD_vjJMqLWR_CQ,11854
+ clarifai/runners/models/model_builder.py,sha256=-GFmVqDr6Dw9zRyiFH2vRaJF2EAVTmj1OfaZPzhzfw8,34877
+ clarifai/runners/models/model_class.py,sha256=R32Nxxe-Ols4nyMq8KWer9jqKn2rDyBHr5ZQGDAhJxk,12178
  clarifai/runners/models/model_class_refract.py,sha256=HxuozxSW7ag5yWCPxjNwgLArQ6dORhyGXlnpPaZz2-c,3211
- clarifai/runners/models/model_run_locally.py,sha256=VZetm9Mko8MBjcjwr6PCnTU9gF3glgD5qvpbj-8tW2s,17962
- clarifai/runners/models/model_runner.py,sha256=qyc73pe4xc9BsUKHwnOyC9g-RNCARiFis4GTh-yg0vg,6219
+ clarifai/runners/models/model_run_locally.py,sha256=m5uLlXpWVGuQIJv-XBt-sHGx1gD8w6d77f9b85DOZqs,18170
+ clarifai/runners/models/model_runner.py,sha256=T4Qn_x0vky7XdeS54bvipzEmKZMra1tQdAu_u01yyjc,6503
  clarifai/runners/models/model_servicer.py,sha256=A--b1P71PBCAMJCpy_-fpNDkfCVdvdMh1LleW15dSas,3037
  clarifai/runners/models/model_upload.py,sha256=VjJgNNBPP9O7LkNCXxOqa0lTW1M7k6XKVyI6XlLdXIc,25095
  clarifai/runners/models/temp.py,sha256=0Xy2tAqA5eu2axuTanac995t5IQrnU1y8MdNm-7we6Q,404
@@ -159,11 +159,11 @@ clarifai/runners/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3
  clarifai/runners/utils/const.py,sha256=bwj-Pcw558-pasdIFbNhnkn-9oiCdojYH1fNTTUG2gU,1048
  clarifai/runners/utils/data_handler.py,sha256=b7k6MWYPXSgjrfw6wsDf82xFYa0D7UjYmjE4mw5HzHM,8499
  clarifai/runners/utils/data_handler_refract.py,sha256=3M-V4hkOoF-9Ix4hE6ocXWiTJPc9dewtu6FMtddd-jQ,6343
- clarifai/runners/utils/data_types.py,sha256=A6IYU55pdPFfoh0K6HkEgTPlgQVv2JUG5lOlqTu1w44,12258
- clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
+ clarifai/runners/utils/data_types.py,sha256=xW3DrBP0uyyyAw_K7xuLhEfBDhTbGu2u1L-mKK2bhTY,12322
+ clarifai/runners/utils/data_utils.py,sha256=j_W_O57ciGbp2JDVLlau-V-DDrF6gsN6cjtLaL51AvA,5512
  clarifai/runners/utils/loader.py,sha256=SgNHMwRmCCymFQm8aDp73NmIUHhM-N60CBlTKbPzmVc,7470
  clarifai/runners/utils/logger.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- clarifai/runners/utils/method_signatures.py,sha256=hwrZnSbnt4OSGqnrN7INghtuJm8EJm0CEnTpwlsGgmw,17468
+ clarifai/runners/utils/method_signatures.py,sha256=ZFqcPsjp9Fbih2u-xTLN3utZkHeOhBxtaiHMEzJKcCw,17657
  clarifai/runners/utils/serializers.py,sha256=S4sRsOVvH191vAGTRTAAdwLlQwlK4T5QVRDGPptg9nQ,7191
  clarifai/runners/utils/url_fetcher.py,sha256=v_8JOWmkyFAzsBulsieKX7Nfjy1Yg7wGSZeqfEvw2cg,1640
  clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc,sha256=0GGbXIecXlOZmQKMCkSRhEBY_a1zvoimv-mHG4pJuNA,167
@@ -229,9 +229,9 @@ clarifai/workflows/__pycache__/__init__.cpython-39.pyc,sha256=9nA--jULSW7OFrYOcs
  clarifai/workflows/__pycache__/export.cpython-310.pyc,sha256=phEGwi2gAojCUhRTqjZVeTDn7Gk6LCVBeSTjAj4m9iY,2418
  clarifai/workflows/__pycache__/utils.cpython-310.pyc,sha256=M9_KTM7GOOS5SPrWwAzqHDqyGvgKi3xuSGvyw6MNf-I,1925
  clarifai/workflows/__pycache__/validate.cpython-310.pyc,sha256=c18Jgp_-CAm8RD_tmUpDCPoqZeexaoWELG0yBzb9rjw,2149
- clarifai-11.1.7rc5.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
- clarifai-11.1.7rc5.dist-info/METADATA,sha256=tOQbvNsbo_DjwJRDarrhk1uAgnSjSCDfdlcaUucYhI8,22215
- clarifai-11.1.7rc5.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- clarifai-11.1.7rc5.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
- clarifai-11.1.7rc5.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
- clarifai-11.1.7rc5.dist-info/RECORD,,
+ clarifai-11.1.7rc7.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+ clarifai-11.1.7rc7.dist-info/METADATA,sha256=5YXRRR7I8rkzcai9pN5auQTaxd0M-X0jUKNaVk-qNmo,22453
+ clarifai-11.1.7rc7.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ clarifai-11.1.7rc7.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
+ clarifai-11.1.7rc7.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+ clarifai-11.1.7rc7.dist-info/RECORD,,
clarifai-11.1.7rc5.dist-info/WHEEL → clarifai-11.1.7rc7.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.1.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any