clarifai-10.9.0-py3-none-any.whl → clarifai-10.9.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "10.9.0"
+ __version__ = "10.9.2"
clarifai/client/dataset.py CHANGED
@@ -628,6 +628,28 @@ class Dataset(Lister, BaseClient):
      if delete_version:
        self.delete_version(dataset_version_id)

+   def merge_dataset(self, merge_dataset_id: str) -> None:
+     """Merges another dataset into the current dataset.
+
+     Args:
+         merge_dataset_id (str): The dataset ID of the dataset to merge.
+
+     Example:
+         >>> from clarifai.client.dataset import Dataset
+         >>> dataset = Dataset(dataset_id='dataset_id', user_id='user_id', app_id='app_id')
+         >>> dataset.merge_dataset(merge_dataset_id='merge_dataset_id')
+     """
+     dataset_filter = resources_pb2.Filter(
+         input=resources_pb2.Input(dataset_ids=[merge_dataset_id]))
+     query = resources_pb2.Search(query=resources_pb2.Query(filters=[dataset_filter]))
+     request = service_pb2.PostDatasetInputsRequest(
+         user_app_id=self.user_app_id, dataset_id=self.id, search=query)
+
+     response = self._grpc_request(self.STUB.PostDatasetInputs, request)
+     if response.status.code != status_code_pb2.SUCCESS:
+       raise Exception(response.status)
+     self.logger.info("\nDataset Merged\n%s", response.status)
+
    def archive_zip(self, wait: bool = True) -> str:
      """Exports the dataset to a zip file URL."""
      request = service_pb2.PutDatasetVersionExportsRequest(
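
For context, a minimal usage sketch of the new merge_dataset method, following the docstring's own example (the IDs below are placeholders):

    from clarifai.client.dataset import Dataset

    # Target dataset that receives the inputs (placeholder IDs).
    dataset = Dataset(dataset_id='target_dataset', user_id='user_id', app_id='app_id')

    # Copies every input from the source dataset into this one via a
    # PostDatasetInputs search; raises an Exception on a non-SUCCESS status.
    dataset.merge_dataset(merge_dataset_id='source_dataset')
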
clarifai/runners/dockerfile_template/Dockerfile.cuda.template CHANGED
@@ -64,6 +64,10 @@ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
  ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
  ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}

+ # Set the NUMBA cache dir to /tmp
+ ENV NUMBA_CACHE_DIR=/tmp/numba_cache
+ ENV HOME=/tmp
+
  # Set the working directory to /app
  WORKDIR /app

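These defaults presumably point Numba's JIT cache (and anything that writes under $HOME) at a path that stays writable when the container runs as a non-root user. A quick sketch of the effect, assuming numba is installed in the image:

    import os

    # NUMBA_CACHE_DIR is read by Numba at import time; the Dockerfile sets it
    # to /tmp/numba_cache, so setdefault here mirrors the container behavior.
    os.environ.setdefault("NUMBA_CACHE_DIR", "/tmp/numba_cache")

    from numba import njit

    @njit(cache=True)  # cache=True persists compiled code under NUMBA_CACHE_DIR
    def add(a, b):
        return a + b

    add(1, 2)  # first call compiles and writes the cache entry under /tmp
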
clarifai/runners/models/model_run_locally.py ADDED
@@ -0,0 +1,196 @@
+ import argparse
+ import importlib.util
+ import inspect
+ import os
+ import shutil
+ import subprocess
+ import sys
+ import tempfile
+ import traceback
+ import venv
+
+ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
+ from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
+ from clarifai_protocol import BaseRunner
+
+ from clarifai.runners.models.model_upload import ModelUploader
+ from clarifai.utils.logging import logger
+
+
+ class ModelRunLocally:
+
+   def __init__(self, model_path):
+     self.model_path = model_path
+     self.requirements_file = os.path.join(self.model_path, "requirements.txt")
+
+   def create_temp_venv(self):
+     """Create a temporary virtual environment."""
+     logger.info("Creating temporary virtual environment...")
+     temp_dir = tempfile.mkdtemp()
+     venv_dir = os.path.join(temp_dir, "venv")
+     venv.create(venv_dir, with_pip=True)
+
+     self.venv_dir = venv_dir
+     self.temp_dir = temp_dir
+     self.python_executable = os.path.join(venv_dir, "bin", "python")
+
+     logger.info(f"Created temporary virtual environment at {venv_dir}")
+     return venv_dir, temp_dir
+
+   def install_requirements(self):
+     """Install the dependencies from requirements.txt and Clarifai."""
+     pip_executable = os.path.join(self.venv_dir, "bin", "pip")
+     try:
+       logger.info(
+           f"Installing requirements from {self.requirements_file}... in the virtual environment {self.venv_dir}"
+       )
+       subprocess.check_call([pip_executable, "install", "-r", self.requirements_file])
+       logger.info("Installing Clarifai package...")
+       subprocess.check_call([pip_executable, "install", "clarifai"])
+       logger.info("Requirements installed successfully!")
+     except subprocess.CalledProcessError as e:
+       logger.error(f"Error installing requirements: {e}")
+       self.clean_up()
+       sys.exit(1)
+
+   def _get_model_runner(self):
+     """Dynamically import the runner class from the model file."""
+
+     # import the runner class that is to be implemented by the user
+     runner_path = os.path.join(self.model_path, "1", "model.py")
+
+     # arbitrary name given to the module to be imported
+     module = "runner_module"
+
+     spec = importlib.util.spec_from_file_location(module, runner_path)
+     runner_module = importlib.util.module_from_spec(spec)
+     sys.modules[module] = runner_module
+     spec.loader.exec_module(runner_module)
+
+     # Find all classes in the model.py file that are subclasses of BaseRunner
+     classes = [
+         cls for _, cls in inspect.getmembers(runner_module, inspect.isclass)
+         if issubclass(cls, BaseRunner) and cls.__module__ == runner_module.__name__
+     ]
+
+     # Ensure there is exactly one subclass of BaseRunner in the model.py file
+     if len(classes) != 1:
+       raise Exception("Expected exactly one subclass of BaseRunner, found: {}".format(
+           len(classes)))
+
+     MyRunner = classes[0]
+     return MyRunner
+
+   def _build_request(self):
+     """Create a mock inference request for testing the model."""
+
+     uploader = ModelUploader(self.model_path)
+     model_version_proto = uploader.get_model_version_proto()
+     model_version_proto.id = "model_version"
+
+     return service_pb2.PostModelOutputsRequest(
+         model=resources_pb2.Model(model_version=model_version_proto),
+         inputs=[
+             resources_pb2.Input(data=resources_pb2.Data(
+                 text=resources_pb2.Text(raw="How many people live in new york?"),
+                 image=resources_pb2.Image(url="https://samples.clarifai.com/metro-north.jpg"),
+                 audio=resources_pb2.Audio(url="https://samples.clarifai.com/GoodMorning.wav"),
+             ))
+         ],
+     )
+
+   def _run_model_inference(self, runner):
+     """Perform inference using the runner."""
+     request = self._build_request()
+
+     try:
+       return runner.predict(request)
+     except Exception as e:
+       logger.error(f"Model Prediction failed: {e}")
+       traceback.print_exc()
+       return service_pb2.MultiOutputResponse(status=status_pb2.Status(
+           code=status_code_pb2.MODEL_PREDICTION_FAILED,
+           description="Prediction failed",
+           details="",
+           internal_details=str(e),
+       ))
+
+   def _run_test(self):
+     """Test the model locally by making a prediction."""
+     # validate that we have checkpoints downloaded before constructing MyRunner
+     uploader = ModelUploader(self.model_path)
+     uploader.download_checkpoints()
+     # construct MyRunner which will call load_model()
+     MyRunner = self._get_model_runner()
+     runner = MyRunner(
+         runner_id="n/a",
+         nodepool_id="n/a",
+         compute_cluster_id="n/a",
+     )
+
+     # send an inference.
+     response = self._run_model_inference(runner)
+     if response.outputs[0].status.code != status_code_pb2.SUCCESS:
+       logger.error(f"Model Prediction failed: {response}")
+     else:
+       logger.info(f"Model Prediction succeeded: {response}")
+
+   def test_model(self):
+     """Test the model by running it locally in the virtual environment."""
+     command = [
+         self.python_executable,
+         "-c",
+         f"import sys; sys.path.append('{os.path.dirname(os.path.abspath(__file__))}'); "
+         f"from model_run_locally import ModelRunLocally; ModelRunLocally('{self.model_path}')._run_test()",
+     ]
+     try:
+       logger.info("Testing the model locally...")
+       subprocess.check_call(command)
+       logger.info("Model tested successfully!")
+     except subprocess.CalledProcessError as e:
+       logger.error(f"Error testing the model: {e}")
+       sys.exit(1)
+
+   # run the model server
+   def run_model_server(self):
+     """Run the Clarifai Runner's model server."""
+
+     command = [
+         self.python_executable, "-m", "clarifai.runners.server", "--model_path", self.model_path,
+         "--start_dev_server"
+     ]
+     try:
+       logger.info(f"Starting model server with model at {self.model_path}...")
+       subprocess.check_call(command)
+       logger.info("Model server started successfully!")
+     except subprocess.CalledProcessError as e:
+       logger.error(f"Error running model server: {e}")
+       self.clean_up()
+       sys.exit(1)
+
+   def clean_up(self):
+     """Clean up the temporary virtual environment."""
+     if os.path.exists(self.temp_dir):
+       logger.info("Cleaning up temporary virtual environment...")
+       shutil.rmtree(self.temp_dir)
+
+
+ def main():
+   parser = argparse.ArgumentParser()
+   parser.add_argument(
+       '--model_path', type=str, required=True, help='Path of the model folder to run locally')
+   args = parser.parse_args()
+
+   model_path = args.model_path
+   manager = ModelRunLocally(model_path)
+   manager.create_temp_venv()
+
+   try:
+     manager.install_requirements()
+     manager.test_model()
+   finally:
+     manager.clean_up()
+
+
+ if __name__ == "__main__":
+   main()
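
A minimal sketch of driving the new helper from Python, mirroring main() above ("./my_model" is a placeholder folder containing requirements.txt and 1/model.py):

    from clarifai.runners.models.model_run_locally import ModelRunLocally

    # Builds a throwaway venv, installs the model's requirements into it,
    # and runs one test prediction in a subprocess before cleaning up.
    manager = ModelRunLocally("./my_model")
    manager.create_temp_venv()
    try:
        manager.install_requirements()
        manager.test_model()
    finally:
        manager.clean_up()
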
clarifai/runners/models/model_upload.py CHANGED
@@ -10,7 +10,7 @@ from google.protobuf import json_format
  from rich import print

  from clarifai.client import BaseClient
- from clarifai.runners.utils.loader import HuggingFaceLoarder
+ from clarifai.runners.utils.loader import HuggingFaceLoader
  from clarifai.urls.helper import ClarifaiUrlHelper
  from clarifai.utils.logging import logger

@@ -59,6 +59,24 @@ class ModelUploader:
        config = yaml.safe_load(file)
      return config

+   def _validate_config_checkpoints(self):
+
+     assert "type" in self.config.get("checkpoints"), "No loader type specified in the config file"
+     loader_type = self.config.get("checkpoints").get("type")
+     if not loader_type:
+       logger.info("No loader type specified in the config file for checkpoints")
+     assert loader_type == "huggingface", "Only huggingface loader supported for now"
+     if loader_type == "huggingface":
+       assert "repo_id" in self.config.get("checkpoints"), "No repo_id specified in the config file"
+       repo_id = self.config.get("checkpoints").get("repo_id")
+
+     # prefer env var for HF_TOKEN but if not provided then use the one from config.yaml if any.
+     if 'HF_TOKEN' in os.environ:
+       hf_token = os.environ['HF_TOKEN']
+     else:
+       hf_token = self.config.get("checkpoints").get("hf_token", None)
+     return repo_id, hf_token
+
    @property
    def client(self):
      if self._client is None:
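
A hypothetical config.yaml checkpoints section that would pass the checks in _validate_config_checkpoints (repo id and token are placeholders), parsed here the same way the uploader parses it:

    import os
    import yaml  # ModelUploader reads config.yaml with yaml.safe_load

    # "type" must be present and equal "huggingface", and "repo_id" is
    # required; hf_token is optional and HF_TOKEN in the environment wins.
    config = yaml.safe_load("""
    checkpoints:
      type: huggingface
      repo_id: owner/model-name
      hf_token: hf_xxxxxxxx
    """)

    if 'HF_TOKEN' in os.environ:
        hf_token = os.environ['HF_TOKEN']
    else:
        hf_token = config["checkpoints"].get("hf_token")
    print(config["checkpoints"]["repo_id"], bool(hf_token))
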
@@ -182,31 +200,18 @@ class ModelUploader:
    def download_checkpoints(self):
      if not self.config.get("checkpoints"):
        logger.info("No checkpoints specified in the config file")
-         return
-
-     assert "type" in self.config.get("checkpoints"), "No loader type specified in the config file"
-     loader_type = self.config.get("checkpoints").get("type")
-     if not loader_type:
-       logger.info("No loader type specified in the config file for checkpoints")
-     assert loader_type == "huggingface", "Only huggingface loader supported for now"
-     if loader_type == "huggingface":
-       assert "repo_id" in self.config.get("checkpoints"), "No repo_id specified in the config file"
-       repo_id = self.config.get("checkpoints").get("repo_id")
+         return True

-       # prefer env var for HF_TOKEN but if not provided then use the one from config.yaml if any.
-       if 'HF_TOKEN' in os.environ:
-         hf_token = os.environ['HF_TOKEN']
-       else:
-         hf_token = self.config.get("checkpoints").get("hf_token", None)
-       assert hf_token != 'hf_token', "The default 'hf_token' is not valid. Please provide a valid token or leave that field out of config.yaml if not needed."
-       loader = HuggingFaceLoarder(repo_id=repo_id, token=hf_token)
+     repo_id, hf_token = self._validate_config_checkpoints()

-       success = loader.download_checkpoints(self.checkpoint_path)
+     loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)
+     success = loader.download_checkpoints(self.checkpoint_path)

-       if not success:
-         logger.error(f"Failed to download checkpoints for model {repo_id}")
-         return
+     if not success:
+       logger.error(f"Failed to download checkpoints for model {repo_id}")
+     else:
        logger.info(f"Downloaded checkpoints for model {repo_id}")
+     return success

    def _concepts_protos_from_concepts(self, concepts):
      concept_protos = []
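
A sketch of the new return contract: download_checkpoints() now returns True when there is nothing to download or the download succeeded, and False on failure, rather than returning None ("./my_model" is a placeholder folder):

    from clarifai.runners.models.model_upload import ModelUploader

    uploader = ModelUploader("./my_model")
    if not uploader.download_checkpoints():
        raise SystemExit("Checkpoint download failed; aborting")
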
@@ -232,7 +237,7 @@ class ModelUploader:
      concepts = config.get('concepts')
    logger.info(f"Updated config.yaml with {len(concepts)} concepts.")

-   def _get_model_version_proto(self):
+   def get_model_version_proto(self):

    model_version_proto = resources_pb2.ModelVersion(
        pretrained_model_config=resources_pb2.PretrainedModelConfig(),
@@ -242,22 +247,58 @@ class ModelUploader:
      model_type_id = self.config.get('model').get('model_type_id')
      if model_type_id in self.CONCEPTS_REQUIRED_MODEL_TYPE:

-       loader = HuggingFaceLoarder()
-       labels = loader.fetch_labels(self.checkpoint_path)
-       # sort the concepts by id and then update the config file
-       labels = sorted(labels.items(), key=lambda x: int(x[0]))
+       if 'concepts' in self.config:
+         labels = self.config.get('concepts')
+         logger.info(f"Found {len(labels)} concepts in the config file.")
+         for concept in labels:
+           concept_proto = json_format.ParseDict(concept, resources_pb2.Concept())
+           model_version_proto.output_info.data.concepts.append(concept_proto)
+       else:
+         labels = HuggingFaceLoader.fetch_labels(self.checkpoint_path)
+         logger.info(f"Found {len(labels)} concepts from the model checkpoints.")
+         # sort the concepts by id and then update the config file
+         labels = sorted(labels.items(), key=lambda x: int(x[0]))

-       config_file = os.path.join(self.folder, 'config.yaml')
-       self.hf_labels_to_config(labels, config_file)
+         config_file = os.path.join(self.folder, 'config.yaml')
+         self.hf_labels_to_config(labels, config_file)

-       model_version_proto.output_info.data.concepts.extend(
-           self._concepts_protos_from_concepts(labels))
+         model_version_proto.output_info.data.concepts.extend(
+             self._concepts_protos_from_concepts(labels))
      return model_version_proto

    def upload_model_version(self, download_checkpoints):
      file_path = f"{self.folder}.tar.gz"
      logger.info(f"Will tar it into file: {file_path}")

+     model_type_id = self.config.get('model').get('model_type_id')
+
+     if (model_type_id in self.CONCEPTS_REQUIRED_MODEL_TYPE) and 'concepts' not in self.config:
+       logger.info(
+           f"Model type {model_type_id} requires concepts to be specified in the config.yaml file."
+       )
+       if self.config.get("checkpoints"):
+         logger.info(
+             "Checkpoints specified in the config.yaml file, will download the HF model's config.json file to infer the concepts."
+         )
+
+         if not download_checkpoints and not HuggingFaceLoader.validate_config(
+             self.checkpoint_path):
+
+           input(
+               "Press Enter to download the HuggingFace model's config.json file to infer the concepts and continue..."
+           )
+           repo_id, hf_token = self._validate_config_checkpoints()
+           loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)
+           loader.download_config(self.checkpoint_path)
+
+       else:
+         logger.error(
+             "No checkpoints specified in the config.yaml file to infer the concepts. Please either specify the concepts directly in the config.yaml file or include a checkpoints section to download the HF model's config.json file to infer the concepts."
+         )
+         return
+
+     model_version_proto = self.get_model_version_proto()
+
      if download_checkpoints:
        tar_cmd = f"tar --exclude=*~ -czvf {self.tar_file} -C {self.folder} ."
      else:  # we don't want to send the checkpoints up even if they are in the folder.
@@ -268,8 +309,6 @@ class ModelUploader:
      os.system(tar_cmd)
      logger.info("Tarring complete, about to start upload.")

-     model_version_proto = self._get_model_version_proto()
-
      file_size = os.path.getsize(self.tar_file)
      logger.info(f"Size of the tar is: {file_size} bytes")

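A minimal sketch of the concepts-in-config path added above: each mapping in the config's 'concepts' list is parsed straight into a Concept proto, bypassing the HuggingFace label lookup (the ids/names below are placeholders):

    from clarifai_grpc.grpc.api import resources_pb2
    from google.protobuf import json_format

    # Same parsing get_model_version_proto performs per config entry.
    concepts = [{"id": "0", "name": "cat"}, {"id": "1", "name": "dog"}]
    protos = [json_format.ParseDict(c, resources_pb2.Concept()) for c in concepts]
    print(protos[0].name)  # -> cat
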
clarifai/runners/utils/loader.py CHANGED
@@ -6,7 +6,9 @@ import subprocess
  from clarifai.utils.logging import logger


- class HuggingFaceLoarder:
+ class HuggingFaceLoader:
+
+   HF_DOWNLOAD_TEXT = "The 'huggingface_hub' package is not installed. Please install it using 'pip install huggingface_hub'."

    def __init__(self, repo_id=None, token=None):
      self.repo_id = repo_id
@@ -14,22 +16,26 @@ class HuggingFaceLoarder:
      if token:
        try:
          if importlib.util.find_spec("huggingface_hub") is None:
-           raise ImportError(
-               "The 'huggingface_hub' package is not installed. Please install it using 'pip install huggingface_hub'."
-           )
+           raise ImportError(self.HF_DOWNLOAD_TEXT)
          os.environ['HF_TOKEN'] = token
+         from huggingface_hub import HfApi
+
+         api = HfApi()
+         api.whoami(token=token)
+
          subprocess.run(f'huggingface-cli login --token={os.environ["HF_TOKEN"]}', shell=True)
        except Exception as e:
-         Exception("Error setting up Hugging Face token ", e)
+         logger.error(
+             f"Error setting up Hugging Face token, please make sure you have the correct token: {e}"
+         )
+         logger.info("Continuing without Hugging Face token")

    def download_checkpoints(self, checkpoint_path: str):
      # throw error if huggingface_hub wasn't installed
      try:
        from huggingface_hub import snapshot_download
      except ImportError:
-       raise ImportError(
-           "The 'huggingface_hub' package is not installed. Please install it using 'pip install huggingface_hub'."
-       )
+       raise ImportError(self.HF_DOWNLOAD_TEXT)
      if os.path.exists(checkpoint_path) and self.validate_download(checkpoint_path):
        logger.info("Checkpoints already exist")
        return True
@@ -52,22 +58,56 @@ class HuggingFaceLoarder:
        return False
      return True

+   def download_config(self, checkpoint_path: str):
+     # throw error if huggingface_hub wasn't installed
+     try:
+       from huggingface_hub import hf_hub_download
+     except ImportError:
+       raise ImportError(self.HF_DOWNLOAD_TEXT)
+     if os.path.exists(checkpoint_path) and os.path.exists(
+         os.path.join(checkpoint_path, 'config.json')):
+       logger.info("HF model's config.json already exists")
+       return True
+     os.makedirs(checkpoint_path, exist_ok=True)
+     try:
+       is_hf_model_exists = self.validate_hf_model()
+       if not is_hf_model_exists:
+         logger.error("Model %s not found on Hugging Face" % (self.repo_id))
+         return False
+       hf_hub_download(repo_id=self.repo_id, filename='config.json', local_dir=checkpoint_path)
+     except Exception as e:
+       logger.error(f"Error downloading model's config.json {e}")
+       return False
+     return True
+
    def validate_hf_model(self,):
      # check if model exists on HF
-
-     from huggingface_hub import file_exists, repo_exists
+     try:
+       from huggingface_hub import file_exists, repo_exists
+     except ImportError:
+       raise ImportError(self.HF_DOWNLOAD_TEXT)
      return repo_exists(self.repo_id) and file_exists(self.repo_id, 'config.json')

    def validate_download(self, checkpoint_path: str):
      # check if model exists on HF
-     from huggingface_hub import list_repo_files
+     try:
+       from huggingface_hub import list_repo_files
+     except ImportError:
+       raise ImportError(self.HF_DOWNLOAD_TEXT)
      checkpoint_dir_files = [
          f for dp, dn, fn in os.walk(os.path.expanduser(checkpoint_path)) for f in fn
      ]
      return (len(checkpoint_dir_files) >= len(list_repo_files(self.repo_id))) and len(
          list_repo_files(self.repo_id)) > 0

-   def fetch_labels(self, checkpoint_path: str):
+   @staticmethod
+   def validate_config(checkpoint_path: str):
+     # check if downloaded config.json exists
+     return os.path.exists(checkpoint_path) and os.path.exists(
+         os.path.join(checkpoint_path, 'config.json'))
+
+   @staticmethod
+   def fetch_labels(checkpoint_path: str):
      # Fetch labels for classification, detection and segmentation models
      config_path = os.path.join(checkpoint_path, 'config.json')
      with open(config_path, 'r') as f:
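
A sketch of the new config-only download path (repo id and path below are placeholders): download_config() fetches just config.json so concepts can be inferred without pulling the whole checkpoint snapshot.

    from clarifai.runners.utils.loader import HuggingFaceLoader

    loader = HuggingFaceLoader(repo_id="owner/model-name")
    if loader.download_config("./my_model/1/checkpoints"):
        # validate_config is the new static check used by upload_model_version.
        assert HuggingFaceLoader.validate_config("./my_model/1/checkpoints")
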
clarifai-10.9.0.dist-info/METADATA → clarifai-10.9.2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: clarifai
- Version: 10.9.0
+ Version: 10.9.2
  Summary: Clarifai Python SDK
  Home-page: https://github.com/Clarifai/clarifai-python
  Author: Clarifai
@@ -20,7 +20,7 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: clarifai-grpc >=10.8.7
+ Requires-Dist: clarifai-grpc >=10.9.4
  Requires-Dist: clarifai-protocol >=0.0.6
  Requires-Dist: numpy >=1.22.0
  Requires-Dist: tqdm >=4.65.0
clarifai-10.9.0.dist-info/RECORD → clarifai-10.9.2.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- clarifai/__init__.py,sha256=Nt0sCLO5SfzhmX3Z2TYY4ZxvmbALrHAzZ1S3yAjDwgQ,23
+ clarifai/__init__.py,sha256=pIn_ah9SsqsljuRvOukQQ4KCQQ4aaDew3A8OtaeuEqU,23
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
@@ -6,7 +6,7 @@ clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,6
  clarifai/client/app.py,sha256=6pckYme1urV2YJjLIYfeZ-vH0Z5YSQa51jzIMcEfwug,38342
  clarifai/client/base.py,sha256=hSHOqkXbSKyaRDeylMMnkhUHCAHhEqno4KI0CXGziBA,7536
  clarifai/client/compute_cluster.py,sha256=lntZDLVDhS71Yj7mZrgq5uhnAuNPUnj48i3zMSuoUpk,8693
- clarifai/client/dataset.py,sha256=oqp6ryg7IyxCZcItzownadYJKK0s1DtghHwITN71_6E,30160
+ clarifai/client/dataset.py,sha256=Xt8M9sP7RLeAqqTu17-8cpupThhkU9n_bl6jXtG_25A,31148
  clarifai/client/deployment.py,sha256=4gfvUvQY9adFS98B0vP9C5fR9OnDRV2JbUIdAkMymT8,2551
  clarifai/client/input.py,sha256=cEVRytrMF1gCgwHLbXlSbPSEQN8uHpUAoKcCdyHO1pc,44406
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
@@ -56,17 +56,18 @@ clarifai/rag/utils.py,sha256=yr1jAcbpws4vFGBqlAwPPE7v1DRba48g8gixLFw8OhQ,4070
  clarifai/runners/__init__.py,sha256=3vr4RVvN1IRy2SxJpyycAAvrUBbH-mXR7pqUmu4w36A,412
  clarifai/runners/server.py,sha256=CVLrv2DjzCvKVXcJ4SWvcFWUZq0bdlBmyEpfVlfgT2A,4902
  clarifai/runners/dockerfile_template/Dockerfile.cpu.template,sha256=B35jcpqWBP3ALa2WRtbtBg8uvDyqP_PWZnJtIeAnjT0,1222
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template,sha256=TMqTZBN1exMYzjLotn17DO4Je0rg9pBapIuwdohwht8,3228
+ clarifai/runners/dockerfile_template/Dockerfile.cuda.template,sha256=8uQp2sX_bIzgQk84FNlS19PwKH_l0Qi54xE7_NVxUTE,3314
  clarifai/runners/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/models/base_typed_model.py,sha256=OnAk08Lo2Y1fGiBc6JJ6UvJ8P435cTsikTNYDkStDpI,7790
  clarifai/runners/models/model_class.py,sha256=9JSPAr4U4K7xI0kSl-q0mHB06zknm2OR-8XIgBCto94,1611
+ clarifai/runners/models/model_run_locally.py,sha256=eP_QFx3fuoKbf5VyRm-rfD9k6fLrXG7u1EaWz-Qjr68,6720
  clarifai/runners/models/model_runner.py,sha256=3vzoastQxkGRDK8T9aojDsLNBb9A3IiKm6YmbFrE9S0,6241
  clarifai/runners/models/model_servicer.py,sha256=L5AuqKDZrsKOnv_Fz1Ld4-nzqehltLTsYAS7NIclm1g,2880
- clarifai/runners/models/model_upload.py,sha256=or1yUlBLOFM9gD3Jjg6Vc9zhpK9uqnRrp4B1bV5VCKM,15985
+ clarifai/runners/models/model_upload.py,sha256=7Oi09Y93WUH7EKTzrKf6JQawZzLj3EnmWUDUgHOrFq8,17654
  clarifai/runners/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/utils/data_handler.py,sha256=sxy9zlAgI6ETuxCQhUgEXAn2GCsaW1GxpK6GTaMne0g,6966
  clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
- clarifai/runners/utils/loader.py,sha256=taWTf-TCIZKh8jjwiFDYg3EqtJPXqn9EFoedIbnYXE8,2811
+ clarifai/runners/utils/loader.py,sha256=1oktDUQA1Lpv0NiCXFwoxpp0jqqbvB7sWvpymwyWY2E,4243
  clarifai/runners/utils/url_fetcher.py,sha256=-Hwjb1SURszn7zUVwi4Of0-nrksfZy-uqT4SvPGCgSU,1446
  clarifai/schema/search.py,sha256=JjTi8ammJgZZ2OGl4K6tIA4zEJ1Fr2ASZARXavI1j5c,2448
  clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
@@ -83,9 +84,9 @@ clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
  clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
  clarifai/workflows/validate.py,sha256=yJq03MaJqi5AK3alKGJJBR89xmmjAQ31sVufJUiOqY8,2556
- clarifai-10.9.0.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
- clarifai-10.9.0.dist-info/METADATA,sha256=oP8QYgz6MkknQDIQ9ky3lWJS_NwvSVp4nK4o8VIcH20,19479
- clarifai-10.9.0.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- clarifai-10.9.0.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
- clarifai-10.9.0.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
- clarifai-10.9.0.dist-info/RECORD,,
+ clarifai-10.9.2.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+ clarifai-10.9.2.dist-info/METADATA,sha256=BcdNjVzD08g4xW4RgxN-zkLGLsG2P590d6tAzY4f5ME,19479
+ clarifai-10.9.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ clarifai-10.9.2.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
+ clarifai-10.9.2.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+ clarifai-10.9.2.dist-info/RECORD,,