clarifai 10.9.1__py3-none-any.whl → 10.9.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "10.9.1"
+ __version__ = "10.9.2"
clarifai/client/dataset.py CHANGED
@@ -628,6 +628,28 @@ class Dataset(Lister, BaseClient):
      if delete_version:
        self.delete_version(dataset_version_id)

+   def merge_dataset(self, merge_dataset_id: str) -> None:
+     """Merges another dataset into the current dataset.
+
+     Args:
+         merge_dataset_id (str): The dataset ID of the dataset to merge.
+
+     Example:
+         >>> from clarifai.client.dataset import Dataset
+         >>> dataset = Dataset(dataset_id='dataset_id', user_id='user_id', app_id='app_id')
+         >>> dataset.merge_dataset(merge_dataset_id='merge_dataset_id')
+     """
+     dataset_filter = resources_pb2.Filter(
+         input=resources_pb2.Input(dataset_ids=[merge_dataset_id]))
+     query = resources_pb2.Search(query=resources_pb2.Query(filters=[dataset_filter]))
+     request = service_pb2.PostDatasetInputsRequest(
+         user_app_id=self.user_app_id, dataset_id=self.id, search=query)
+
+     response = self._grpc_request(self.STUB.PostDatasetInputs, request)
+     if response.status.code != status_code_pb2.SUCCESS:
+       raise Exception(response.status)
+     self.logger.info("\nDataset Merged\n%s", response.status)
+
    def archive_zip(self, wait: bool = True) -> str:
      """Exports the dataset to a zip file URL."""
      request = service_pb2.PutDatasetVersionExportsRequest(
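The new `merge_dataset` performs the copy server-side: it wraps the source dataset in a search filter and posts the matching inputs into the current dataset, so nothing is downloaded locally. A minimal usage sketch (all IDs are placeholders):

```python
from clarifai.client.dataset import Dataset

# Hypothetical IDs; substitute your own user/app/dataset identifiers.
target = Dataset(dataset_id="main_dataset", user_id="me", app_id="my_app")
try:
    # Copies every input of "staging_dataset" into "main_dataset" server-side.
    target.merge_dataset(merge_dataset_id="staging_dataset")
except Exception as exc:  # merge_dataset raises on a non-SUCCESS status
    print(f"Merge failed: {exc}")
```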
clarifai/runners/dockerfile_template/Dockerfile.cuda.template CHANGED
@@ -64,6 +64,10 @@ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
  ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
  ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}

+ # Set the NUMBA cache dir to /tmp
+ ENV NUMBA_CACHE_DIR=/tmp/numba_cache
+ ENV HOME=/tmp
+
  # Set the working directory to /app
  WORKDIR /app

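These two ENV lines matter for locked-down containers: Numba writes its JIT cache to `NUMBA_CACHE_DIR`, and tools that cache under `$HOME` now get a writable location. A small Python sketch of the same idea, assuming Numba is present in the image:

```python
import os

# Point caches at /tmp before importing numba (or any tool that caches
# under $HOME), so JIT cache writes succeed on a read-only filesystem.
os.environ.setdefault("NUMBA_CACHE_DIR", "/tmp/numba_cache")
os.environ.setdefault("HOME", "/tmp")

import numba  # noqa: E402  -- safe now; cache writes go to /tmp/numba_cache
```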
clarifai/runners/models/model_run_locally.py CHANGED
@@ -22,8 +22,6 @@ class ModelRunLocally:
    def __init__(self, model_path):
      self.model_path = model_path
      self.requirements_file = os.path.join(self.model_path, "requirements.txt")
-     self.venv_dir, self.temp_dir = self.create_temp_venv()
-     self.python_executable = os.path.join(self.venv_dir, "bin", "python")

    def create_temp_venv(self):
      """Create a temporary virtual environment."""
@@ -32,6 +30,10 @@ class ModelRunLocally:
      venv_dir = os.path.join(temp_dir, "venv")
      venv.create(venv_dir, with_pip=True)

+     self.venv_dir = venv_dir
+     self.temp_dir = temp_dir
+     self.python_executable = os.path.join(venv_dir, "bin", "python")
+
      logger.info(f"Created temporary virtual environment at {venv_dir}")
      return venv_dir, temp_dir

@@ -125,7 +127,6 @@ class ModelRunLocally:
          nodepool_id="n/a",
          compute_cluster_id="n/a",
      )
-     runner.load_model()

      # send an inference.
      response = self._run_model_inference(runner)
@@ -182,6 +183,7 @@ def main():

    model_path = args.model_path
    manager = ModelRunLocally(model_path)
+   manager.create_temp_venv()

    try:
      manager.install_requirements()
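Because the constructor no longer builds the virtual environment, `create_temp_venv()` must now be called explicitly before `install_requirements()`, as `main()` does above. A minimal sketch with a placeholder model path:

```python
from clarifai.runners.models.model_run_locally import ModelRunLocally

manager = ModelRunLocally("/path/to/local/model")  # placeholder model path
# As of this release the venv is no longer built in __init__; create it
# explicitly before touching python_executable or installing dependencies.
venv_dir, temp_dir = manager.create_temp_venv()
manager.install_requirements()
```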
clarifai/runners/models/model_upload.py CHANGED
@@ -60,9 +60,6 @@ class ModelUploader:
      return config

    def _validate_config_checkpoints(self):
-     if not self.config.get("checkpoints"):
-       logger.info("No checkpoints specified in the config file")
-       return None

      assert "type" in self.config.get("checkpoints"), "No loader type specified in the config file"
      loader_type = self.config.get("checkpoints").get("type")
@@ -201,15 +198,20 @@ class ModelUploader:
      return f"{self.folder}.tar.gz"

    def download_checkpoints(self):
+     if not self.config.get("checkpoints"):
+       logger.info("No checkpoints specified in the config file")
+       return True
+
      repo_id, hf_token = self._validate_config_checkpoints()
-     if repo_id and hf_token:
-       loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)
-       success = loader.download_checkpoints(self.checkpoint_path)

-       if not success:
-         logger.error(f"Failed to download checkpoints for model {repo_id}")
-         return
+     loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)
+     success = loader.download_checkpoints(self.checkpoint_path)
+
+     if not success:
+       logger.error(f"Failed to download checkpoints for model {repo_id}")
+     else:
        logger.info(f"Downloaded checkpoints for model {repo_id}")
+     return success

    def _concepts_protos_from_concepts(self, concepts):
      concept_protos = []
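Note the refactor also changes the contract: `download_checkpoints` now returns `True` when no checkpoints are configured (nothing to do) and otherwise propagates the loader's success flag. A standalone sketch of that control flow, with a stand-in `download` callable:

```python
import logging

logger = logging.getLogger(__name__)

def download_checkpoints(config: dict, download) -> bool:
    """Stand-in for ModelUploader.download_checkpoints; `download` is any
    callable returning True/False, e.g. a HuggingFaceLoader method."""
    if not config.get("checkpoints"):
        logger.info("No checkpoints specified in the config file")
        return True  # nothing to download counts as success
    success = download()
    if not success:
        logger.error("Failed to download checkpoints")
    else:
        logger.info("Downloaded checkpoints")
    return success
```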
@@ -245,15 +247,23 @@ class ModelUploader:
      model_type_id = self.config.get('model').get('model_type_id')
      if model_type_id in self.CONCEPTS_REQUIRED_MODEL_TYPE:

-       labels = HuggingFaceLoader.fetch_labels(self.checkpoint_path)
-       # sort the concepts by id and then update the config file
-       labels = sorted(labels.items(), key=lambda x: int(x[0]))
+       if 'concepts' in self.config:
+         labels = self.config.get('concepts')
+         logger.info(f"Found {len(labels)} concepts in the config file.")
+         for concept in labels:
+           concept_proto = json_format.ParseDict(concept, resources_pb2.Concept())
+           model_version_proto.output_info.data.concepts.append(concept_proto)
+       else:
+         labels = HuggingFaceLoader.fetch_labels(self.checkpoint_path)
+         logger.info(f"Found {len(labels)} concepts from the model checkpoints.")
+         # sort the concepts by id and then update the config file
+         labels = sorted(labels.items(), key=lambda x: int(x[0]))

-       config_file = os.path.join(self.folder, 'config.yaml')
-       self.hf_labels_to_config(labels, config_file)
+         config_file = os.path.join(self.folder, 'config.yaml')
+         self.hf_labels_to_config(labels, config_file)

-       model_version_proto.output_info.data.concepts.extend(
-           self._concepts_protos_from_concepts(labels))
+         model_version_proto.output_info.data.concepts.extend(
+             self._concepts_protos_from_concepts(labels))
      return model_version_proto

    def upload_model_version(self, download_checkpoints):
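The new branch allows concepts to be declared directly in `config.yaml`; each mapping is parsed into a `Concept` proto with `json_format.ParseDict`. A minimal sketch of that parsing step, assuming `clarifai-grpc` is installed (the concept values are illustrative):

```python
from clarifai_grpc.grpc.api import resources_pb2
from google.protobuf import json_format

# The shape a user would put under `concepts:` in config.yaml (illustrative values).
concepts = [
    {"id": "0", "name": "cat"},
    {"id": "1", "name": "dog"},
]

# ParseDict fills the given proto message from a plain dict and returns it.
protos = [json_format.ParseDict(c, resources_pb2.Concept()) for c in concepts]
print(protos[0].name)  # -> cat
```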
@@ -261,17 +271,31 @@ class ModelUploader:
      logger.info(f"Will tar it into file: {file_path}")

      model_type_id = self.config.get('model').get('model_type_id')
-     repo_id, hf_token = self._validate_config_checkpoints()
-
-     loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)

-     if not download_checkpoints and not loader.validate_download(self.checkpoint_path) and (
-         model_type_id in self.CONCEPTS_REQUIRED_MODEL_TYPE) and 'concepts' not in self.config:
-       logger.error(
-           f"Model type {model_type_id} requires concepts to be specified in the config file or download the model checkpoints to infer the concepts."
+     if (model_type_id in self.CONCEPTS_REQUIRED_MODEL_TYPE) and 'concepts' not in self.config:
+       logger.info(
+           f"Model type {model_type_id} requires concepts to be specified in the config.yaml file.."
        )
-       input("Press Enter to download the checkpoints to infer the concepts and continue...")
-       self.download_checkpoints()
+       if self.config.get("checkpoints"):
+         logger.info(
+             "Checkpoints specified in the config.yaml file, will download the HF model's config.json file to infer the concepts."
+         )
+
+         if not download_checkpoints and not HuggingFaceLoader.validate_config(
+             self.checkpoint_path):
+
+           input(
+               "Press Enter to download the HuggingFace model's config.json file to infer the concepts and continue..."
+           )
+           repo_id, hf_token = self._validate_config_checkpoints()
+           loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)
+           loader.download_config(self.checkpoint_path)
+
+       else:
+         logger.error(
+             "No checkpoints specified in the config.yaml file to infer the concepts. Please either specify the concepts directly in the config.yaml file or include a checkpoints section to download the HF model's config.json file to infer the concepts."
+         )
+         return

      model_version_proto = self.get_model_version_proto()

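The reworked guard reads as a small decision tree: if the model type requires concepts and none are in the config, a `checkpoints` section must exist; if the HF `config.json` is not yet present, only that file is fetched; with no `checkpoints` section the upload aborts. A plain-Python restatement (helper names are illustrative, not the SDK's):

```python
def can_upload(model_type_id: str, config: dict, config_json_present: bool,
               concepts_required: set) -> bool:
    """Illustrative restatement of the new guard in upload_model_version."""
    if model_type_id not in concepts_required or "concepts" in config:
        return True  # concepts already satisfied
    if not config.get("checkpoints"):
        return False  # nothing to infer concepts from: abort the upload
    if not config_json_present:
        # Only the HF config.json is downloaded (not the full checkpoints)
        # and the concepts are inferred from it.
        pass  # download_config(...) would run here
    return True
```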
clarifai/runners/utils/loader.py CHANGED
@@ -18,9 +18,17 @@ class HuggingFaceLoader:
        if importlib.util.find_spec("huggingface_hub") is None:
          raise ImportError(self.HF_DOWNLOAD_TEXT)
        os.environ['HF_TOKEN'] = token
+       from huggingface_hub import HfApi
+
+       api = HfApi()
+       api.whoami(token=token)
+
        subprocess.run(f'huggingface-cli login --token={os.environ["HF_TOKEN"]}', shell=True)
      except Exception as e:
-       Exception("Error setting up Hugging Face token ", e)
+       logger.error(
+           f"Error setting up Hugging Face token, please make sure you have the correct token: {e}"
+       )
+       logger.info("Continuing without Hugging Face token")

    def download_checkpoints(self, checkpoint_path: str):
      # throw error if huggingface_hub wasn't installed
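Token validation now happens eagerly: `HfApi.whoami` raises for an invalid token, so a bad token is logged immediately instead of surfacing later as a failed download. The same check in isolation, assuming `huggingface_hub` is installed:

```python
from huggingface_hub import HfApi

def token_is_valid(token: str) -> bool:
    """Returns True if the Hugging Face token authenticates; False otherwise."""
    try:
        HfApi().whoami(token=token)  # raises (e.g. HTTP 401) on a bad token
        return True
    except Exception:
        return False
```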
@@ -50,6 +58,28 @@ class HuggingFaceLoader:
        return False
      return True

+   def download_config(self, checkpoint_path: str):
+     # throw error if huggingface_hub wasn't installed
+     try:
+       from huggingface_hub import hf_hub_download
+     except ImportError:
+       raise ImportError(self.HF_DOWNLOAD_TEXT)
+     if os.path.exists(checkpoint_path) and os.path.exists(
+         os.path.join(checkpoint_path, 'config.json')):
+       logger.info("HF model's config.json already exists")
+       return True
+     os.makedirs(checkpoint_path, exist_ok=True)
+     try:
+       is_hf_model_exists = self.validate_hf_model()
+       if not is_hf_model_exists:
+         logger.error("Model %s not found on Hugging Face" % (self.repo_id))
+         return False
+       hf_hub_download(repo_id=self.repo_id, filename='config.json', local_dir=checkpoint_path)
+     except Exception as e:
+       logger.error(f"Error downloading model's config.json {e}")
+       return False
+     return True
+
    def validate_hf_model(self,):
      # check if model exists on HF
      try:
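`hf_hub_download` fetches a single file from a repo, so inferring concepts costs one small JSON download instead of the full weights. A standalone sketch, assuming `huggingface_hub` is installed (the repo id is a placeholder):

```python
from huggingface_hub import hf_hub_download

# Downloads only config.json into ./checkpoints (placeholder repo id).
path = hf_hub_download(
    repo_id="google-bert/bert-base-uncased",
    filename="config.json",
    local_dir="./checkpoints",
)
print(path)  # local path to the downloaded config.json
```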
@@ -70,6 +100,12 @@ class HuggingFaceLoader:
      return (len(checkpoint_dir_files) >= len(list_repo_files(self.repo_id))) and len(
          list_repo_files(self.repo_id)) > 0

+   @staticmethod
+   def validate_config(checkpoint_path: str):
+     # check if downloaded config.json exists
+     return os.path.exists(checkpoint_path) and os.path.exists(
+         os.path.join(checkpoint_path, 'config.json'))
+
    @staticmethod
    def fetch_labels(checkpoint_path: str):
      # Fetch labels for classification, detection and segmentation models
clarifai-10.9.2.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: clarifai
- Version: 10.9.1
+ Version: 10.9.2
  Summary: Clarifai Python SDK
  Home-page: https://github.com/Clarifai/clarifai-python
  Author: Clarifai
@@ -20,7 +20,7 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: clarifai-grpc >=10.8.8
+ Requires-Dist: clarifai-grpc >=10.9.4
  Requires-Dist: clarifai-protocol >=0.0.6
  Requires-Dist: numpy >=1.22.0
  Requires-Dist: tqdm >=4.65.0
clarifai-10.9.2.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- clarifai/__init__.py,sha256=1-Xk7E7-IsivFNQIvqGyYjTSA2tEoxgp679FIfjZd-w,23
+ clarifai/__init__.py,sha256=pIn_ah9SsqsljuRvOukQQ4KCQQ4aaDew3A8OtaeuEqU,23
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
@@ -6,7 +6,7 @@ clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,6
  clarifai/client/app.py,sha256=6pckYme1urV2YJjLIYfeZ-vH0Z5YSQa51jzIMcEfwug,38342
  clarifai/client/base.py,sha256=hSHOqkXbSKyaRDeylMMnkhUHCAHhEqno4KI0CXGziBA,7536
  clarifai/client/compute_cluster.py,sha256=lntZDLVDhS71Yj7mZrgq5uhnAuNPUnj48i3zMSuoUpk,8693
- clarifai/client/dataset.py,sha256=oqp6ryg7IyxCZcItzownadYJKK0s1DtghHwITN71_6E,30160
+ clarifai/client/dataset.py,sha256=Xt8M9sP7RLeAqqTu17-8cpupThhkU9n_bl6jXtG_25A,31148
  clarifai/client/deployment.py,sha256=4gfvUvQY9adFS98B0vP9C5fR9OnDRV2JbUIdAkMymT8,2551
  clarifai/client/input.py,sha256=cEVRytrMF1gCgwHLbXlSbPSEQN8uHpUAoKcCdyHO1pc,44406
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
@@ -56,18 +56,18 @@ clarifai/rag/utils.py,sha256=yr1jAcbpws4vFGBqlAwPPE7v1DRba48g8gixLFw8OhQ,4070
  clarifai/runners/__init__.py,sha256=3vr4RVvN1IRy2SxJpyycAAvrUBbH-mXR7pqUmu4w36A,412
  clarifai/runners/server.py,sha256=CVLrv2DjzCvKVXcJ4SWvcFWUZq0bdlBmyEpfVlfgT2A,4902
  clarifai/runners/dockerfile_template/Dockerfile.cpu.template,sha256=B35jcpqWBP3ALa2WRtbtBg8uvDyqP_PWZnJtIeAnjT0,1222
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template,sha256=TMqTZBN1exMYzjLotn17DO4Je0rg9pBapIuwdohwht8,3228
+ clarifai/runners/dockerfile_template/Dockerfile.cuda.template,sha256=8uQp2sX_bIzgQk84FNlS19PwKH_l0Qi54xE7_NVxUTE,3314
  clarifai/runners/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/models/base_typed_model.py,sha256=OnAk08Lo2Y1fGiBc6JJ6UvJ8P435cTsikTNYDkStDpI,7790
  clarifai/runners/models/model_class.py,sha256=9JSPAr4U4K7xI0kSl-q0mHB06zknm2OR-8XIgBCto94,1611
- clarifai/runners/models/model_run_locally.py,sha256=35K37fq9Zkd0tautnRf9y1fvfbp_57e05-6wREBxdUI,6720
+ clarifai/runners/models/model_run_locally.py,sha256=eP_QFx3fuoKbf5VyRm-rfD9k6fLrXG7u1EaWz-Qjr68,6720
  clarifai/runners/models/model_runner.py,sha256=3vzoastQxkGRDK8T9aojDsLNBb9A3IiKm6YmbFrE9S0,6241
  clarifai/runners/models/model_servicer.py,sha256=L5AuqKDZrsKOnv_Fz1Ld4-nzqehltLTsYAS7NIclm1g,2880
- clarifai/runners/models/model_upload.py,sha256=QxuRtr5dFc5_6cEPQBklACGgHVdxeIVIMYujs_ZKO28,16648
+ clarifai/runners/models/model_upload.py,sha256=7Oi09Y93WUH7EKTzrKf6JQawZzLj3EnmWUDUgHOrFq8,17654
  clarifai/runners/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/utils/data_handler.py,sha256=sxy9zlAgI6ETuxCQhUgEXAn2GCsaW1GxpK6GTaMne0g,6966
  clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
- clarifai/runners/utils/loader.py,sha256=uSjn36-Henlht-mOuQN3p5XPUvsFlYPBs_J2Gu3YAOA,2900
+ clarifai/runners/utils/loader.py,sha256=1oktDUQA1Lpv0NiCXFwoxpp0jqqbvB7sWvpymwyWY2E,4243
  clarifai/runners/utils/url_fetcher.py,sha256=-Hwjb1SURszn7zUVwi4Of0-nrksfZy-uqT4SvPGCgSU,1446
  clarifai/schema/search.py,sha256=JjTi8ammJgZZ2OGl4K6tIA4zEJ1Fr2ASZARXavI1j5c,2448
  clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
@@ -84,9 +84,9 @@ clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
  clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
  clarifai/workflows/validate.py,sha256=yJq03MaJqi5AK3alKGJJBR89xmmjAQ31sVufJUiOqY8,2556
- clarifai-10.9.1.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
- clarifai-10.9.1.dist-info/METADATA,sha256=TRfuw_q3tLo09odVd5cB2JTVyCunUt1DstFmvmo7CJE,19479
- clarifai-10.9.1.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- clarifai-10.9.1.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
- clarifai-10.9.1.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
- clarifai-10.9.1.dist-info/RECORD,,
+ clarifai-10.9.2.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+ clarifai-10.9.2.dist-info/METADATA,sha256=BcdNjVzD08g4xW4RgxN-zkLGLsG2P590d6tAzY4f5ME,19479
+ clarifai-10.9.2.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ clarifai-10.9.2.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
+ clarifai-10.9.2.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+ clarifai-10.9.2.dist-info/RECORD,,