clarifai 10.11.1__py3-none-any.whl → 11.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "10.11.1"
+ __version__ = "11.0.1"
clarifai/client/dataset.py CHANGED
@@ -281,9 +281,9 @@ class Dataset(Lister, BaseClient):
  failed_input_ids = list(set(failed_input_ids) - set(duplicate_input_ids))
  duplicate_details = [[
  input_ids[id], id, "Input has a duplicate ID.",
- dataset_obj.data_generator[input_ids[id]].image_path,
- dataset_obj.data_generator[input_ids[id]].labels,
- dataset_obj.data_generator[input_ids[id]].metadata
+ getattr(dataset_obj.data_generator[input_ids[id]], 'image_path', None),
+ getattr(dataset_obj.data_generator[input_ids[id]], 'labels', None),
+ getattr(dataset_obj.data_generator[input_ids[id]], 'metadata', None)
  ] for id in duplicate_input_ids]
  duplicate_table = tabulate(
  duplicate_details,
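Note on the change above: using getattr with a None default means the duplicate-ID report no longer assumes every generated data item exposes image_path, labels, and metadata (text-only items, for instance, may have no image_path). A minimal sketch of the pattern, using a hypothetical item object rather than the SDK's own dataset classes:

# Hypothetical illustration of the getattr-with-default pattern used in the hunk above.
class TextItem:
    labels = ["positive"]
    metadata = {"source": "reviews"}

item = TextItem()  # stands in for an entry from dataset_obj.data_generator
row = [
    getattr(item, 'image_path', None),  # -> None instead of raising AttributeError
    getattr(item, 'labels', None),      # -> ["positive"]
    getattr(item, 'metadata', None),    # -> {"source": "reviews"}
]
print(row)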
@@ -386,7 +386,7 @@ class Dataset(Lister, BaseClient):
  tablefmt="grid")
  timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
  self.logger.warning(
- f"{timestamp}\nFailed to upload {len(failed_retrying_inputs)} inputs in current batch {batch_no}:\n{failed_table}\n\n"
+ f"{timestamp}\nFailed to upload {len(failed_retrying_inputs)} inputs in current batch {batch_no} due to {retry_response}:\n{failed_table}\n\n"
  )

  def _data_upload(self,
clarifai/client/model.py CHANGED
@@ -503,6 +503,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Predicts the model based on the given filepath.
@@ -534,7 +535,7 @@ class Model(Lister, BaseClient):
  file_bytes = f.read()

  return self.predict_by_bytes(file_bytes, input_type, compute_cluster_id, nodepool_id,
- deployment_id, inference_params, output_config)
+ deployment_id, user_id, inference_params, output_config)

  def predict_by_bytes(self,
  input_bytes: bytes,
@@ -542,6 +543,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Predicts the model based on the given bytes.
@@ -581,11 +583,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.predict(
  inputs=[input_proto],
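Note on the change above: predicting against a dedicated deployment or a compute cluster/nodepool now requires the owning user's ID, either passed as the new user_id argument or read from the CLARIFAI_USER_ID environment variable; otherwise a UserError is raised. A hedged usage sketch (the URL, IDs, and file name are placeholders, and a CLARIFAI_PAT is assumed to be set for authentication):

import os
from clarifai.client.model import Model

os.environ["CLARIFAI_USER_ID"] = "my-user-id"  # one option: set the environment variable

model = Model(url="https://clarifai.com/my-user-id/my-app/models/my-model")  # placeholder URL
prediction = model.predict_by_filepath(
    filepath="sample.jpg",          # placeholder input file
    input_type="image",
    deployment_id="my-deployment",  # placeholder deployment
    user_id="my-user-id",           # other option: pass user_id explicitly (new in this release)
)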
@@ -599,6 +613,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Predicts the model based on the given URL.
@@ -639,11 +654,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.predict(
  inputs=[input_proto],
@@ -712,6 +739,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Generate the stream output on model based on the given filepath.
@@ -748,6 +776,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id=compute_cluster_id,
  nodepool_id=nodepool_id,
  deployment_id=deployment_id,
+ user_id=user_id,
  inference_params=inference_params,
  output_config=output_config)

@@ -757,6 +786,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Generate the stream output on model based on the given bytes.
@@ -798,11 +828,21 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.generate(
  inputs=[input_proto],
@@ -816,6 +856,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Generate the stream output on model based on the given URL.
@@ -857,11 +898,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.generate(
  inputs=[input_proto],
@@ -930,6 +983,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Stream the model output based on the given filepath.
@@ -964,6 +1018,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id=compute_cluster_id,
  nodepool_id=nodepool_id,
  deployment_id=deployment_id,
+ user_id=user_id,
  inference_params=inference_params,
  output_config=output_config)

@@ -973,6 +1028,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Stream the model output based on the given bytes.
@@ -1016,11 +1072,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.stream(
  inputs=input_generator(),
@@ -1034,6 +1102,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Stream the model output based on the given URL.
@@ -1075,11 +1144,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.stream(
  inputs=input_generator(),
clarifai/datasets/upload/features.py CHANGED
@@ -7,7 +7,7 @@ from typing import List, Optional, Union
  class TextFeatures:
  """Text classification datasets preprocessing output features."""
  text: str
- labels: List[Union[str, int]] # List[str or int] to cater for multi-class tasks
+ labels: List[Union[str, int]] = None # List[str or int] to cater for multi-class tasks
  id: Optional[int] = None # text_id
  metadata: Optional[dict] = None
  label_ids: Optional[List[str]] = None
clarifai/datasets/upload/multimodal.py CHANGED
@@ -34,7 +34,8 @@ class MultiModalDataset(ClarifaiDataset):
  metadata = Struct()
  image_bytes = data_item.image_bytes
  text = data_item.text
- labels = data_item.labels if isinstance(data_item.labels, list) else [data_item.labels]
+ labels = data_item.labels if ((data_item.labels is None) or
+ isinstance(data_item.labels, list)) else [data_item.labels]
  input_id = f"{self.dataset_id}-{id}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
  metadata.update(data_item.metadata)
clarifai/datasets/upload/text.py CHANGED
@@ -34,8 +34,9 @@ class TextClassificationDataset(ClarifaiDataset):
  data_item = self.data_generator[id]
  metadata = Struct()
  text = data_item.text
- labels = data_item.labels if isinstance(data_item.labels,
- list) else [data_item.labels] # clarifai concept
+ labels = data_item.labels if ((data_item.labels is None) or
+ isinstance(data_item.labels, list)) else [data_item.labels
+ ] # clarifai concept
  label_ids = data_item.label_ids
  input_id = f"{self.dataset_id}-{get_uuid(8)}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
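Note on the changes above: both upload paths now leave labels untouched when it is None or already a list and only wrap a scalar label, which together with the new labels default in TextFeatures lets unlabeled text items pass through. A small sketch of the normalization expression in isolation:

# Sketch of the labels normalization now used by the upload dataset classes.
def normalize_labels(labels):
    # None stays None, lists pass through, a single label is wrapped in a list.
    return labels if ((labels is None) or isinstance(labels, list)) else [labels]

assert normalize_labels(None) is None
assert normalize_labels("cat") == ["cat"]
assert normalize_labels(["cat", "dog"]) == ["cat", "dog"]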
clarifai/runners/models/model_upload.py CHANGED
@@ -1,5 +1,6 @@
  import os
  import re
+ import sys
  import time
  from string import Template

@@ -8,6 +9,7 @@ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
  from clarifai_grpc.grpc.api.status import status_code_pb2
  from google.protobuf import json_format
  from rich import print
+ from rich.markup import escape

  from clarifai.client import BaseClient
  from clarifai.runners.utils.const import (AVAILABLE_PYTHON_IMAGES, AVAILABLE_TORCH_IMAGES,
@@ -31,6 +33,7 @@ class ModelUploader:
  self._client = None
  self.folder = self._validate_folder(folder)
  self.config = self._load_config(os.path.join(self.folder, 'config.yaml'))
+ self._validate_config()
  self.model_proto = self._get_model_proto()
  self.model_id = self.model_proto.id
  self.model_version_id = None
@@ -69,13 +72,61 @@ class ModelUploader:
  assert "repo_id" in self.config.get("checkpoints"), "No repo_id specified in the config file"
  repo_id = self.config.get("checkpoints").get("repo_id")

- # prefer env var for HF_TOKEN but if not provided then use the one from config.yaml if any.
- if 'HF_TOKEN' in os.environ:
- hf_token = os.environ['HF_TOKEN']
- else:
- hf_token = self.config.get("checkpoints").get("hf_token", None)
+ hf_token = self.config.get("checkpoints").get("hf_token", None)
  return repo_id, hf_token

+ def _check_app_exists(self):
+ resp = self.client.STUB.GetApp(service_pb2.GetAppRequest(user_app_id=self.client.user_app_id))
+ if resp.status.code == status_code_pb2.SUCCESS:
+ return True
+ return False
+
+ def _validate_config_model(self):
+ assert "model" in self.config, "model section not found in the config file"
+ model = self.config.get('model')
+ assert "user_id" in model, "user_id not found in the config file"
+ assert "app_id" in model, "app_id not found in the config file"
+ assert "model_type_id" in model, "model_type_id not found in the config file"
+ assert "id" in model, "model_id not found in the config file"
+ if '.' in model.get('id'):
+ logger.error(
+ "Model ID cannot contain '.', please remove it from the model_id in the config file")
+ sys.exit(1)
+
+ assert model.get('user_id') != "", "user_id cannot be empty in the config file"
+ assert model.get('app_id') != "", "app_id cannot be empty in the config file"
+ assert model.get('model_type_id') != "", "model_type_id cannot be empty in the config file"
+ assert model.get('id') != "", "model_id cannot be empty in the config file"
+
+ if not self._check_app_exists():
+ logger.error(
+ f"App {self.client.user_app_id.app_id} not found for user {self.client.user_app_id.user_id}"
+ )
+ sys.exit(1)
+
+ def _validate_config(self):
+ self._validate_config_model()
+
+ if self.config.get("checkpoints"):
+ self._validate_config_checkpoints()
+
+ assert "inference_compute_info" in self.config, "inference_compute_info not found in the config file"
+
+ if self.config.get("concepts"):
+ model_type_id = self.config.get('model').get('model_type_id')
+ assert model_type_id in CONCEPTS_REQUIRED_MODEL_TYPE, f"Model type {model_type_id} not supported for concepts"
+
+ if self.config.get("checkpoints"):
+ _, hf_token = self._validate_config_checkpoints()
+
+ if hf_token:
+ is_valid_token = HuggingFaceLoader.validate_hftoken(hf_token)
+ if not is_valid_token:
+ logger.error(
+ "Invalid Hugging Face token provided in the config file, this might cause issues with downloading the restricted model checkpoints."
+ )
+ logger.info("Continuing without Hugging Face token")
+
  @property
  def client(self):
  if self._client is None:
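Note on the change above: _validate_config now runs at construction time and asserts that config.yaml has a model section with non-empty id, user_id, app_id, and model_type_id (and no '.' in the id), that inference_compute_info is present, that the target app exists, and that any configured Hugging Face token actually authenticates. A hedged sketch of the parsed config shape that would satisfy the model-section checks (all values are placeholders; the inference_compute_info fields shown are illustrative only):

# Roughly what the parsed config.yaml must contain to pass _validate_config_model.
config = {
    "model": {
        "id": "my-model",            # non-empty, must not contain '.'
        "user_id": "my-user-id",
        "app_id": "my-app",
        "model_type_id": "text-to-text",
    },
    "inference_compute_info": {      # presence is asserted by _validate_config
        "cpu_limit": "1",
        "cpu_memory": "4Gi",
    },
}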
@@ -259,6 +310,7 @@ class ModelUploader:

  if not success:
  logger.error(f"Failed to download checkpoints for model {repo_id}")
+ sys.exit(1)
  else:
  logger.info(f"Downloaded checkpoints for model {repo_id}")
  return success
@@ -366,6 +418,9 @@ class ModelUploader:
  logger.info(f"Size of the tar is: {file_size} bytes")

  self.maybe_create_model()
+ if not self.check_model_exists():
+ logger.error(f"Failed to create model: {self.model_proto.id}")
+ sys.exit(1)

  for response in self.client.STUB.PostModelVersionsUpload(
  self.model_version_stream_upload_iterator(model_version_proto, file_path),):
@@ -470,7 +525,7 @@ class ModelUploader:
  for log_entry in logs.log_entries:
  if log_entry.url not in seen_logs:
  seen_logs.add(log_entry.url)
- print(f"Model Building Logs...: {log_entry.message.strip()}")
+ print(f"Model Building Logs...: {escape(log_entry.message.strip())}")
  time.sleep(1)
  elif status_code == status_code_pb2.MODEL_TRAINED:
  logger.info(f"\nModel build complete! (elapsed {time.time() - st:.1f}s)")
clarifai/runners/utils/loader.py CHANGED
@@ -14,22 +14,28 @@ class HuggingFaceLoader:
  self.repo_id = repo_id
  self.token = token
  if token:
- try:
- if importlib.util.find_spec("huggingface_hub") is None:
- raise ImportError(self.HF_DOWNLOAD_TEXT)
- os.environ['HF_TOKEN'] = token
- from huggingface_hub import HfApi
-
- api = HfApi()
- api.whoami(token=token)
-
+ if self.validate_hftoken(token):
  subprocess.run(f'huggingface-cli login --token={os.environ["HF_TOKEN"]}', shell=True)
- except Exception as e:
- logger.error(
- f"Error setting up Hugging Face token, please make sure you have the correct token: {e}"
- )
+ logger.info("Hugging Face token validated")
+ else:
  logger.info("Continuing without Hugging Face token")

+ @classmethod
+ def validate_hftoken(cls, hf_token: str):
+ try:
+ if importlib.util.find_spec("huggingface_hub") is None:
+ raise ImportError(cls.HF_DOWNLOAD_TEXT)
+ os.environ['HF_TOKEN'] = hf_token
+ from huggingface_hub import HfApi
+
+ api = HfApi()
+ api.whoami(token=hf_token)
+ return True
+ except Exception as e:
+ logger.error(
+ f"Error setting up Hugging Face token, please make sure you have the correct token: {e}")
+ return False
+
  def download_checkpoints(self, checkpoint_path: str):
  # throw error if huggingface_hub wasn't installed
  try:
@@ -49,7 +55,7 @@ class HuggingFaceLoader:
  snapshot_download(
  repo_id=self.repo_id, local_dir=checkpoint_path, local_dir_use_symlinks=False)
  except Exception as e:
- logger.exception(f"Error downloading model checkpoints {e}")
+ logger.error(f"Error downloading model checkpoints {e}")
  return False
  finally:
  is_downloaded = self.validate_download(checkpoint_path)
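Note on the changes above: the token check that used to live inline in __init__ is now exposed as the classmethod HuggingFaceLoader.validate_hftoken, which ModelUploader._validate_config also calls so a bad token is reported before any upload work starts. A hedged usage sketch (the token value is a placeholder; the call needs huggingface_hub installed and network access):

from clarifai.runners.utils.loader import HuggingFaceLoader

# Returns True if the token authenticates against the Hugging Face Hub, False otherwise
# (failures are logged rather than raised).
if HuggingFaceLoader.validate_hftoken("hf_xxx"):  # placeholder token
    print("Token is valid; gated checkpoints can be downloaded.")
else:
    print("Continuing without a Hugging Face token.")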
clarifai-10.11.1.dist-info/METADATA → clarifai-11.0.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: clarifai
- Version: 10.11.1
+ Version: 11.0.1
  Summary: Clarifai Python SDK
  Home-page: https://github.com/Clarifai/clarifai-python
  Author: Clarifai
@@ -20,7 +20,7 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.8
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: clarifai-grpc>=10.11.2
+ Requires-Dist: clarifai-grpc>=11.0.0
  Requires-Dist: clarifai-protocol>=0.0.14
  Requires-Dist: numpy>=1.22.0
  Requires-Dist: tqdm>=4.65.0
@@ -35,6 +35,17 @@ Requires-Dist: fsspec==2024.6.1
  Requires-Dist: click==8.1.7
  Provides-Extra: all
  Requires-Dist: pycocotools==2.0.6; extra == "all"
+ Dynamic: author
+ Dynamic: author-email
+ Dynamic: classifier
+ Dynamic: description
+ Dynamic: description-content-type
+ Dynamic: home-page
+ Dynamic: license
+ Dynamic: provides-extra
+ Dynamic: requires-dist
+ Dynamic: requires-python
+ Dynamic: summary

  <h1 align="center">
  <a href="https://www.clarifai.com/"><img alt="Clarifai" title="Clarifai" src="https://github.com/user-attachments/assets/623b883b-7fe5-4b95-bbfa-8691f5779af4"></a>
clarifai-10.11.1.dist-info/RECORD → clarifai-11.0.1.dist-info/RECORD CHANGED
@@ -1,4 +1,4 @@
- clarifai/__init__.py,sha256=FW_6Eb2k6BFCnOXd3YfZT6V1LNiTbns6HVOCriUm7sE,24
+ clarifai/__init__.py,sha256=Zmzw2GjcXgMCFI3odyVpnnMKDMWMnQK5e88UxtrzWcY,23
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
@@ -13,11 +13,11 @@ clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,6
  clarifai/client/app.py,sha256=6pckYme1urV2YJjLIYfeZ-vH0Z5YSQa51jzIMcEfwug,38342
  clarifai/client/base.py,sha256=hSHOqkXbSKyaRDeylMMnkhUHCAHhEqno4KI0CXGziBA,7536
  clarifai/client/compute_cluster.py,sha256=EvW9TJjPvInUlggfg1A98sxoWH8_PY5rCVXZhsj6ac0,8705
- clarifai/client/dataset.py,sha256=AIzwbYs-ExkmUqW9nuEJgpW8-D7rjA1PtopU5Iu6YZE,32018
+ clarifai/client/dataset.py,sha256=y3zKT_VhP1gyN3OO-b3cPeW21ZXyKbQ7ZJkEG06bsTU,32096
  clarifai/client/deployment.py,sha256=w7Y6pA1rYG4KRK1SwusRZc2sQRXlG8wezuVdzSWpCo0,2586
  clarifai/client/input.py,sha256=GvrPV2chThNjimekBIleuIr6AD10_wrfc-1Hm5C4NQ8,45648
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
- clarifai/client/model.py,sha256=8koRWV_-cLLtZYFHQzNxMFw2X1VXAZ6aJI-1cOp6r4U,84655
+ clarifai/client/model.py,sha256=0HC22i8RCjkC2lwRKnokMR6eaSt9_XXbi1Oim4sZVk4,88620
  clarifai/client/module.py,sha256=FTkm8s9m-EaTKN7g9MnLhGJ9eETUfKG7aWZ3o1RshYs,4204
  clarifai/client/nodepool.py,sha256=la3vTFrO4LX8zm2eQ5jqf2L0-kQ63Dano8FibadoZbk,10152
  clarifai/client/search.py,sha256=GaPWN6JmTQGZaCHr6U1yv0zqR6wKFl7i9IVLg2ul1CI,14254
@@ -39,10 +39,10 @@ clarifai/datasets/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
  clarifai/datasets/export/inputs_annotations.py,sha256=3AtUBrMIjw8H3ehDsJFYcBFoAZ1QKQo1hXTMsHh8f20,10159
  clarifai/datasets/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/datasets/upload/base.py,sha256=UIc0ufyIBCrb83_sFpv21L8FshsX4nwsLYQkdlJfzD4,2357
- clarifai/datasets/upload/features.py,sha256=jv2x7jGZKS-LMt87sEZNBwwOskHbP26XTMjoiaSA5pg,2024
+ clarifai/datasets/upload/features.py,sha256=GK69WvUYnks5G26Z5L5XSisBIZILLv7lYhS2y8BJCt0,2031
  clarifai/datasets/upload/image.py,sha256=HlCsfEMu_C4GVecGSv52RUJ6laLW8H64Pfj_FQyX6qg,8580
- clarifai/datasets/upload/multimodal.py,sha256=4jBFXgT44tPFHm3O3lYcnKM046qjUNJJaR0oBVTa3HM,2309
- clarifai/datasets/upload/text.py,sha256=boVJenfQZKf79aXu8CEP4g_ANzX5ROdd06g07O7RnXU,2198
+ clarifai/datasets/upload/multimodal.py,sha256=_NpNQak9KMn0NOiOr48MYnXL0GQZ1LXKhwdYF1HhrHs,2377
+ clarifai/datasets/upload/text.py,sha256=dpRMNz49EyKau0kwksEaNV6TLBUf5lSr7t5g3pG2byM,2298
  clarifai/datasets/upload/utils.py,sha256=BerWhq40ZUN30z6VImlc93eZtT-1vI18AMgSOuNzJEM,9647
  clarifai/datasets/upload/loaders/README.md,sha256=aNRutSCTzLp2ruIZx74ZkN5AxpzwKOxMa7OzabnKpwg,2980
  clarifai/datasets/upload/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -69,12 +69,12 @@ clarifai/runners/models/model_class.py,sha256=9JSPAr4U4K7xI0kSl-q0mHB06zknm2OR-8
  clarifai/runners/models/model_run_locally.py,sha256=OhzQbmaV8Wwgs2H0KhdDF6Z7bYSaIh4RRA0QwSiv5vY,20644
  clarifai/runners/models/model_runner.py,sha256=3vzoastQxkGRDK8T9aojDsLNBb9A3IiKm6YmbFrE9S0,6241
  clarifai/runners/models/model_servicer.py,sha256=X4715PVA5PBurRTYcwSEudg8fShGV6InAF4mmRlRcHg,2826
- clarifai/runners/models/model_upload.py,sha256=xQ0AqeBVePPwmMVM5uOiXRTRV09U-du2FduKv7Qgl-A,20087
+ clarifai/runners/models/model_upload.py,sha256=ggUa1OwqZg57C-Dagf6U22fSC4SHKZ_mB4xMSynCtPg,22411
  clarifai/runners/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/runners/utils/const.py,sha256=zINrIjDESUykq6xMLKNghwJ6N8qxDAtAJ6-1bH8VdOw,1238
  clarifai/runners/utils/data_handler.py,sha256=sxy9zlAgI6ETuxCQhUgEXAn2GCsaW1GxpK6GTaMne0g,6966
  clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
- clarifai/runners/utils/loader.py,sha256=1oktDUQA1Lpv0NiCXFwoxpp0jqqbvB7sWvpymwyWY2E,4243
+ clarifai/runners/utils/loader.py,sha256=k2sZeH3awPDzF0HPeYqExZ1fEX6azAc8PCl5ddWOGKE,4414
  clarifai/runners/utils/url_fetcher.py,sha256=v_8JOWmkyFAzsBulsieKX7Nfjy1Yg7wGSZeqfEvw2cg,1640
  clarifai/schema/search.py,sha256=JjTi8ammJgZZ2OGl4K6tIA4zEJ1Fr2ASZARXavI1j5c,2448
  clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
@@ -92,9 +92,9 @@ clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
  clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
  clarifai/workflows/validate.py,sha256=yJq03MaJqi5AK3alKGJJBR89xmmjAQ31sVufJUiOqY8,2556
- clarifai-10.11.1.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
- clarifai-10.11.1.dist-info/METADATA,sha256=w46FTa1y_xtH2ZXLrCYI9dC0o-Uh48q7gqC_kXYZ50A,22220
- clarifai-10.11.1.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- clarifai-10.11.1.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
- clarifai-10.11.1.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
- clarifai-10.11.1.dist-info/RECORD,,
+ clarifai-11.0.1.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+ clarifai-11.0.1.dist-info/METADATA,sha256=vmixUJQ2NNbLBg8KmGuOhwW50uGqphmLvOm_4y8dq8Q,22456
+ clarifai-11.0.1.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ clarifai-11.0.1.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
+ clarifai-11.0.1.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+ clarifai-11.0.1.dist-info/RECORD,,
clarifai-10.11.1.dist-info/WHEEL → clarifai-11.0.1.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.6.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any