clarifai 10.8.4__py3-none-any.whl → 10.8.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. clarifai/__init__.py +1 -1
  2. clarifai/client/dataset.py +9 -3
  3. clarifai/constants/dataset.py +1 -1
  4. clarifai/datasets/upload/base.py +6 -3
  5. clarifai/datasets/upload/features.py +10 -0
  6. clarifai/datasets/upload/image.py +22 -13
  7. clarifai/datasets/upload/multimodal.py +70 -0
  8. clarifai/datasets/upload/text.py +8 -5
  9. clarifai/utils/misc.py +6 -0
  10. {clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/METADATA +2 -1
  11. {clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/RECORD +15 -58
  12. clarifai/models/model_serving/README.md +0 -158
  13. clarifai/models/model_serving/__init__.py +0 -14
  14. clarifai/models/model_serving/cli/__init__.py +0 -12
  15. clarifai/models/model_serving/cli/_utils.py +0 -53
  16. clarifai/models/model_serving/cli/base.py +0 -14
  17. clarifai/models/model_serving/cli/build.py +0 -79
  18. clarifai/models/model_serving/cli/clarifai_clis.py +0 -33
  19. clarifai/models/model_serving/cli/create.py +0 -171
  20. clarifai/models/model_serving/cli/example_cli.py +0 -34
  21. clarifai/models/model_serving/cli/login.py +0 -26
  22. clarifai/models/model_serving/cli/upload.py +0 -183
  23. clarifai/models/model_serving/constants.py +0 -21
  24. clarifai/models/model_serving/docs/cli.md +0 -161
  25. clarifai/models/model_serving/docs/concepts.md +0 -229
  26. clarifai/models/model_serving/docs/dependencies.md +0 -11
  27. clarifai/models/model_serving/docs/inference_parameters.md +0 -139
  28. clarifai/models/model_serving/docs/model_types.md +0 -19
  29. clarifai/models/model_serving/model_config/__init__.py +0 -16
  30. clarifai/models/model_serving/model_config/base.py +0 -369
  31. clarifai/models/model_serving/model_config/config.py +0 -312
  32. clarifai/models/model_serving/model_config/inference_parameter.py +0 -129
  33. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -25
  34. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -19
  35. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -20
  36. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -19
  37. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -19
  38. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -22
  39. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -32
  40. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -19
  41. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -19
  42. clarifai/models/model_serving/model_config/output.py +0 -133
  43. clarifai/models/model_serving/model_config/triton/__init__.py +0 -14
  44. clarifai/models/model_serving/model_config/triton/serializer.py +0 -136
  45. clarifai/models/model_serving/model_config/triton/triton_config.py +0 -182
  46. clarifai/models/model_serving/model_config/triton/wrappers.py +0 -281
  47. clarifai/models/model_serving/repo_build/__init__.py +0 -14
  48. clarifai/models/model_serving/repo_build/build.py +0 -198
  49. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -2
  50. clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -169
  51. clarifai/models/model_serving/repo_build/static_files/inference.py +0 -26
  52. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -25
  53. clarifai/models/model_serving/repo_build/static_files/test.py +0 -40
  54. clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -75
  55. clarifai/models/model_serving/utils.py +0 -31
  56. {clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/LICENSE +0 -0
  57. {clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/WHEEL +0 -0
  58. {clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/entry_points.txt +0 -0
  59. {clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/top_level.txt +0 -0
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "10.8.4"
+ __version__ = "10.8.5"
clarifai/client/dataset.py CHANGED
@@ -25,6 +25,7 @@ from clarifai.datasets.export.inputs_annotations import (DatasetExportReader,
  from clarifai.datasets.upload.base import ClarifaiDataLoader
  from clarifai.datasets.upload.image import (VisualClassificationDataset, VisualDetectionDataset,
  VisualSegmentationDataset)
+ from clarifai.datasets.upload.multimodal import MultiModalDataset
  from clarifai.datasets.upload.text import TextClassificationDataset
  from clarifai.datasets.upload.utils import DisplayUploadStatus
  from clarifai.errors import UserError
@@ -352,14 +353,15 @@ class Dataset(Lister, BaseClient):
  if input_details:
  failed_input_details = [
  index, failed_id, input_details.status.details,
- dataset_obj.data_generator[index].image_path,
+ getattr(dataset_obj.data_generator[index], 'image_path', None) or
+ getattr(dataset_obj.data_generator[index], 'text', None),
  dataset_obj.data_generator[index].labels, dataset_obj.data_generator[index].metadata
  ]
  failed_inputs_logs.append(failed_input_details)

  failed_table = tabulate(
  failed_inputs_logs,
- headers=["Index", "Input ID", "Status", "Image Path", "Labels", "Metadata"],
+ headers=["Index", "Input ID", "Status", "Input", "Labels", "Metadata"],
  tablefmt="grid")
  timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
  self.logger.warning(
@@ -422,7 +424,8 @@ class Dataset(Lister, BaseClient):
  if self.task not in DATASET_UPLOAD_TASKS:
  raise UserError("Task should be one of \
  'text_classification', 'visual_classification', \
- 'visual_detection', 'visual_segmentation', 'visual_captioning'")
+ 'visual_detection', 'visual_segmentation', 'visual_captioning', 'multimodal_dataset'"
+ )

  if self.task == "text_classification":
  dataset_obj = TextClassificationDataset(dataloader, self.id)
@@ -433,6 +436,9 @@ class Dataset(Lister, BaseClient):
  elif self.task == "visual_segmentation":
  dataset_obj = VisualSegmentationDataset(dataloader, self.id)

+ elif self.task == "multimodal_dataset":
+ dataset_obj = MultiModalDataset(dataloader, self.id)
+
  else: # visual_classification & visual_captioning
  dataset_obj = VisualClassificationDataset(dataloader, self.id)

clarifai/constants/dataset.py CHANGED
@@ -1,6 +1,6 @@
  DATASET_UPLOAD_TASKS = [
  "visual_classification", "text_classification", "visual_detection", "visual_segmentation",
- "visual_captioning"
+ "visual_captioning", "multimodal_dataset"
  ]

  TASK_TO_ANNOTATION_TYPE = {
clarifai/datasets/upload/base.py CHANGED
@@ -4,21 +4,24 @@ from typing import Iterator, List, Tuple, TypeVar, Union
  from clarifai_grpc.grpc.api import resources_pb2

  from clarifai.constants.dataset import DATASET_UPLOAD_TASKS
- from clarifai.datasets.upload.features import (TextFeatures, VisualClassificationFeatures,
+ from clarifai.datasets.upload.features import (MultiModalFeatures, TextFeatures,
+ VisualClassificationFeatures,
  VisualDetectionFeatures, VisualSegmentationFeatures)

  OutputFeaturesType = TypeVar(
  'OutputFeaturesType',
  bound=Union[TextFeatures, VisualClassificationFeatures, VisualDetectionFeatures,
- VisualSegmentationFeatures])
+ VisualSegmentationFeatures, MultiModalFeatures])


  class ClarifaiDataset:
  """Clarifai datasets base class."""

- def __init__(self, data_generator: 'ClarifaiDataLoader', dataset_id: str) -> None:
+ def __init__(self, data_generator: 'ClarifaiDataLoader', dataset_id: str,
+ max_workers: int = 4) -> None:
  self.data_generator = data_generator
  self.dataset_id = dataset_id
+ self.max_workers = max_workers
  self.all_input_ids = {}
  self._all_input_protos = {}
  self._all_annotation_protos = defaultdict(list)
clarifai/datasets/upload/features.py CHANGED
@@ -49,3 +49,13 @@ class VisualSegmentationFeatures:
  metadata: Optional[dict] = None
  image_bytes: Optional[bytes] = None
  label_ids: Optional[List[str]] = None
+
+
+ @dataclass
+ class MultiModalFeatures:
+ """Multi-modal datasets preprocessing output features."""
+ text: str
+ image_bytes: str
+ labels: List[Union[str, int]] = None # List[str or int] to cater for multi-class tasks
+ id: Optional[int] = None # image_id
+ metadata: Optional[dict] = None
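A minimal sketch (not part of the package) of how the new dataclass might be populated; field semantics follow the hunk above, and the `MultiModalDataset` class added later in this diff sends `image_bytes` when it is set, falling back to `text` otherwise:

```python
from clarifai.datasets.upload.features import MultiModalFeatures

# text-only item; alternatively pass raw encoded image bytes via image_bytes
sample = MultiModalFeatures(
    text="a red bicycle leaning against a brick wall",
    image_bytes=None,
    labels=["bicycle"],            # Clarifai concept(s)
    id=0,
    metadata={"source": "demo"})
```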
clarifai/datasets/upload/image.py CHANGED
@@ -1,5 +1,4 @@
  import os
- import uuid
  from concurrent.futures import ThreadPoolExecutor
  from typing import List, Tuple, Type

@@ -8,12 +7,16 @@ from google.protobuf.struct_pb2 import Struct

  from clarifai.client.input import Inputs
  from clarifai.datasets.upload.base import ClarifaiDataLoader, ClarifaiDataset
+ from clarifai.utils.misc import get_uuid


  class VisualClassificationDataset(ClarifaiDataset):

- def __init__(self, data_generator: Type[ClarifaiDataLoader], dataset_id: str) -> None:
- super().__init__(data_generator, dataset_id)
+ def __init__(self,
+ data_generator: Type[ClarifaiDataLoader],
+ dataset_id: str,
+ max_workers: int = 4) -> None:
+ super().__init__(data_generator, dataset_id, max_workers)

  def _extract_protos(self, batch_input_ids: List[str]
  ) -> Tuple[List[resources_pb2.Input], List[resources_pb2.Annotation]]:
@@ -33,7 +36,7 @@ class VisualClassificationDataset(ClarifaiDataset):
  labels = data_item.labels if isinstance(data_item.labels,
  list) else [data_item.labels] # clarifai concept
  label_ids = data_item.label_ids
- input_id = f"{self.dataset_id}-{uuid.uuid4().hex[:8]}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
+ input_id = f"{self.dataset_id}-{get_uuid(8)}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  geo_info = data_item.geo_info
  if data_item.metadata is not None:
  metadata.update(data_item.metadata)
@@ -64,7 +67,7 @@ class VisualClassificationDataset(ClarifaiDataset):
  geo_info=geo_info,
  metadata=metadata))

- with ThreadPoolExecutor(max_workers=4) as executor:
+ with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
  futures = [executor.submit(process_data_item, id) for id in batch_input_ids]
  for job in futures:
  job.result()
@@ -75,8 +78,11 @@ class VisualClassificationDataset(ClarifaiDataset):
  class VisualDetectionDataset(ClarifaiDataset):
  """Visual detection dataset proto class."""

- def __init__(self, data_generator: Type[ClarifaiDataLoader], dataset_id: str) -> None:
- super().__init__(data_generator, dataset_id)
+ def __init__(self,
+ data_generator: Type[ClarifaiDataLoader],
+ dataset_id: str,
+ max_workers: int = 4) -> None:
+ super().__init__(data_generator, dataset_id, max_workers)

  def _extract_protos(self, batch_input_ids: List[int]
  ) -> Tuple[List[resources_pb2.Input], List[resources_pb2.Annotation]]:
@@ -101,7 +107,7 @@ class VisualDetectionDataset(ClarifaiDataset):
  else:
  label_ids = None
  bboxes = data_item.bboxes # [[xmin,ymin,xmax,ymax],...,[xmin,ymin,xmax,ymax]]
- input_id = f"{self.dataset_id}-{uuid.uuid4().hex[:8]}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
+ input_id = f"{self.dataset_id}-{get_uuid(8)}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
  metadata.update(data_item.metadata)
  else:
@@ -135,7 +141,7 @@ class VisualDetectionDataset(ClarifaiDataset):
  bbox=bboxes[i],
  label_id=label_ids[i] if label_ids else None))

- with ThreadPoolExecutor(max_workers=4) as executor:
+ with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
  futures = [executor.submit(process_data_item, id) for id in batch_input_ids]
  for job in futures:
  job.result()
@@ -146,8 +152,11 @@ class VisualDetectionDataset(ClarifaiDataset):
  class VisualSegmentationDataset(ClarifaiDataset):
  """Visual segmentation dataset proto class."""

- def __init__(self, data_generator: Type[ClarifaiDataLoader], dataset_id: str) -> None:
- super().__init__(data_generator, dataset_id)
+ def __init__(self,
+ data_generator: Type[ClarifaiDataLoader],
+ dataset_id: str,
+ max_workers: int = 4) -> None:
+ super().__init__(data_generator, dataset_id, max_workers)

  def _extract_protos(self, batch_input_ids: List[str]
  ) -> Tuple[List[resources_pb2.Input], List[resources_pb2.Annotation]]:
@@ -172,7 +181,7 @@ class VisualSegmentationDataset(ClarifaiDataset):
  else:
  label_ids = None
  _polygons = data_item.polygons # list of polygons: [[[x,y],...,[x,y]],...]
- input_id = f"{self.dataset_id}-{uuid.uuid4().hex[:8]}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
+ input_id = f"{self.dataset_id}-{get_uuid(8)}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
  metadata.update(data_item.metadata)
  else:
@@ -210,7 +219,7 @@ class VisualSegmentationDataset(ClarifaiDataset):
  except IndexError:
  continue

- with ThreadPoolExecutor(max_workers=4) as executor:
+ with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
  futures = [executor.submit(process_data_item, id) for id in batch_input_ids]
  for job in futures:
  job.result()
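The `max_workers` argument threaded through the constructors above controls the `ThreadPoolExecutor` fan-out during proto extraction. A hedged sketch of tuning it when constructing a dataset object directly (`my_loader` is a hypothetical `ClarifaiDataLoader`; `Dataset.upload_dataset` normally builds this object itself, leaving the default of 4 threads):

```python
from clarifai.datasets.upload.image import VisualClassificationDataset

dataset_obj = VisualClassificationDataset(
    data_generator=my_loader,   # hypothetical loader yielding VisualClassificationFeatures
    dataset_id="demo-dataset",
    max_workers=8)              # batch proto extraction now fans out over 8 threads
```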
clarifai/datasets/upload/multimodal.py ADDED
@@ -0,0 +1,70 @@
+ from concurrent.futures import ThreadPoolExecutor
+ from typing import List, Tuple, Type
+
+ from clarifai_grpc.grpc.api import resources_pb2
+ from google.protobuf.struct_pb2 import Struct
+
+ from clarifai.client.input import Inputs
+ from clarifai.datasets.upload.base import ClarifaiDataLoader, ClarifaiDataset
+ from clarifai.utils.misc import get_uuid
+
+
+ class MultiModalDataset(ClarifaiDataset):
+
+ def __init__(self,
+ data_generator: Type[ClarifaiDataLoader],
+ dataset_id: str,
+ max_workers: int = 4) -> None:
+ super().__init__(data_generator, dataset_id, max_workers)
+
+ def _extract_protos(
+ self,
+ batch_input_ids: List[str],
+ ) -> Tuple[List[resources_pb2.Input]]:
+ """ Creats Multimodal (image and text) input protos for batch of input ids.
+ Args:
+ batch_input_ids: List of input IDs to retrieve the protos for.
+ Returns:
+ input_protos: List of input protos.
+
+ """
+ input_protos, annotation_protos = [], []
+
+ def process_data_item(id):
+ data_item = self.data_generator[id]
+ metadata = Struct()
+ image_bytes = data_item.image_bytes
+ text = data_item.text
+ labels = data_item.labels if isinstance(data_item.labels, list) else [data_item.labels]
+ id = get_uuid(8)
+ input_id = f"{self.dataset_id}-{id}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
+ if data_item.metadata is not None:
+ metadata.update(data_item.metadata)
+ else:
+ metadata = None
+
+ self.all_input_ids[id] = input_id
+ if data_item.image_bytes is not None:
+ input_protos.append(
+ Inputs.get_input_from_bytes(
+ input_id=input_id,
+ image_bytes=image_bytes,
+ dataset_id=self.dataset_id,
+ labels=labels,
+ metadata=metadata))
+ else:
+ input_protos.append(
+ Inputs.get_text_input(
+ input_id=input_id,
+ raw_text=text,
+ dataset_id=self.dataset_id,
+ labels=labels,
+ metadata=metadata))
+
+ with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
+ futures = [executor.submit(process_data_item, id) for id in batch_input_ids]
+
+ for job in futures:
+ job.result()
+
+ return input_protos, annotation_protos
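For context, a minimal, assumed usage sketch of the new upload path: a custom `ClarifaiDataLoader` whose `task` is `"multimodal_dataset"` yields `MultiModalFeatures` items, which `MultiModalDataset` turns into text or image input protos. The client call at the end is a placeholder, not taken from this diff:

```python
from clarifai.datasets.upload.base import ClarifaiDataLoader
from clarifai.datasets.upload.features import MultiModalFeatures


class DemoMultiModalLoader(ClarifaiDataLoader):
  """Hypothetical in-memory loader with two text samples."""

  def __init__(self):
    self.samples = [("a red bicycle leaning against a wall", ["bicycle"]),
                    ("two cats sleeping on a sofa", ["cat"])]

  @property
  def task(self):
    return "multimodal_dataset"  # routes the upload to MultiModalDataset

  def load_data(self):
    pass  # nothing to preload for this in-memory demo

  def __len__(self):
    return len(self.samples)

  def __getitem__(self, idx):
    text, labels = self.samples[idx]
    return MultiModalFeatures(text=text, image_bytes=None, labels=labels, id=idx)


# Placeholder client call (user/app/dataset IDs and PAT are assumptions):
# from clarifai.client.dataset import Dataset
# dataset = Dataset(user_id="me", app_id="my-app", dataset_id="demo", pat="...")
# dataset.upload_dataset(dataloader=DemoMultiModalLoader())
```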
clarifai/datasets/upload/text.py CHANGED
@@ -1,4 +1,3 @@
- import uuid
  from concurrent.futures import ThreadPoolExecutor
  from typing import List, Tuple, Type

@@ -6,6 +5,7 @@ from clarifai_grpc.grpc.api import resources_pb2
  from google.protobuf.struct_pb2 import Struct

  from clarifai.client.input import Inputs
+ from clarifai.utils.misc import get_uuid

  from .base import ClarifaiDataLoader, ClarifaiDataset

@@ -13,8 +13,11 @@ from .base import ClarifaiDataLoader, ClarifaiDataset
  class TextClassificationDataset(ClarifaiDataset):
  """Upload text classification datasets to clarifai datasets"""

- def __init__(self, data_generator: Type[ClarifaiDataLoader], dataset_id: str) -> None:
- super().__init__(data_generator, dataset_id)
+ def __init__(self,
+ data_generator: Type[ClarifaiDataLoader],
+ dataset_id: str,
+ max_workers: int = 4) -> None:
+ super().__init__(data_generator, dataset_id, max_workers)

  def _extract_protos(self, batch_input_ids: List[int]
  ) -> Tuple[List[resources_pb2.Input], List[resources_pb2.Annotation]]:
@@ -34,7 +37,7 @@ class TextClassificationDataset(ClarifaiDataset):
  labels = data_item.labels if isinstance(data_item.labels,
  list) else [data_item.labels] # clarifai concept
  label_ids = data_item.label_ids
- input_id = f"{self.dataset_id}-{uuid.uuid4().hex[:8]}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
+ input_id = f"{self.dataset_id}-{get_uuid(8)}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
  metadata.update(data_item.metadata)

@@ -48,7 +51,7 @@ class TextClassificationDataset(ClarifaiDataset):
  label_ids=label_ids,
  metadata=metadata))

- with ThreadPoolExecutor(max_workers=4) as executor:
+ with ThreadPoolExecutor(max_workers=self.max_workers) as executor:
  futures = [executor.submit(process_data_item, id) for id in batch_input_ids]
  for job in futures:
  job.result()
clarifai/utils/misc.py CHANGED
@@ -1,4 +1,5 @@
  import os
+ import uuid
  from typing import Any, Dict, List

  from clarifai.errors import UserError
@@ -69,3 +70,8 @@ def concept_relations_accumulation(relations_dict: Dict[str, Any], subject_concept
  relations_dict[object_concept] = []
  relations_dict[subject_concept] = []
  return relations_dict
+
+
+ def get_uuid(val: int) -> str:
+ """Generates a UUID."""
+ return uuid.uuid4().hex[:val]
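A quick sketch of the new helper's behaviour: it returns a random hex UUID truncated to `val` characters, which the upload classes above use to build input IDs of the form `<dataset_id>-<8-char-hex>`:

```python
from clarifai.utils.misc import get_uuid

suffix = get_uuid(8)                 # e.g. "3f9c1a7b" (random hex, 8 chars)
input_id = f"my-dataset-{suffix}"    # mirrors the ID pattern used by the upload classes
```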
{clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: clarifai
- Version: 10.8.4
+ Version: 10.8.5
  Summary: Clarifai Python SDK
  Home-page: https://github.com/Clarifai/clarifai-python
  Author: Clarifai
@@ -32,6 +32,7 @@ Requires-Dist: Pillow >=9.5.0
  Requires-Dist: inquirerpy ==0.3.4
  Requires-Dist: tabulate >=0.9.0
  Requires-Dist: protobuf ==5.27.3
+ Requires-Dist: fsspec ==2024.6.1
  Provides-Extra: all
  Requires-Dist: pycocotools ==2.0.6 ; extra == 'all'

{clarifai-10.8.4.dist-info → clarifai-10.8.5.dist-info}/RECORD RENAMED
@@ -1,11 +1,11 @@
- clarifai/__init__.py,sha256=HSKp-CWoewa1bRsD-uVM7lGOdBh9yxh5X4Qc8hkw9ak,23
+ clarifai/__init__.py,sha256=2-_okis9rQrTd70xh0ljzdLxGlwhIxOh6pYB-vr7Zmk,23
  clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
  clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
  clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,662
  clarifai/client/app.py,sha256=VC5TqeYVNUyENPxOOKNSAl0q3ivPiqg-gtRr77ACIIM,38424
  clarifai/client/base.py,sha256=JXbbjg2CXo8rOdw-XgKWWtLVAhPv3OZua5LFT5w4U2Q,7380
- clarifai/client/dataset.py,sha256=XX-J-9Ict1CQrEycq-JbdxUTuucSgLeDSvnlHE1ucQY,29903
+ clarifai/client/dataset.py,sha256=AmkeYdZI7oe7ZCEh4odTuzC5r4ESCmkdHHo4v23dSeQ,30204
  clarifai/client/input.py,sha256=ZLqa1jGx4NgCbunOTpJxCq4lDQ5xAf4GQ0rsZY8AHCM,44456
  clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
  clarifai/client/model.py,sha256=YqeCwk1v_Rs2L6OTZSkDZvxGaxMoFMYuxVV7s9jdd0I,74416
@@ -17,7 +17,7 @@ clarifai/client/auth/__init__.py,sha256=7EwR0NrozkAUwpUnCsqXvE_p0wqx_SelXlSpKShK
  clarifai/client/auth/helper.py,sha256=hqwI7Zlsvivc-O9aAdtxyJT3zkpuMvbxjRaiCTsWYGk,14183
  clarifai/client/auth/register.py,sha256=2CMdBsoVLoTfjyksE6j7BM2tiEc73WKYvxnwDDgNn1k,536
  clarifai/client/auth/stub.py,sha256=xy4-fV0W8keCgXld4eOVzFQEIKxOktNwtL5bLztReug,4940
- clarifai/constants/dataset.py,sha256=Puz6_FfTm30G5FVBb1GJsobMkNtbg0Y2Soy7eyHjvtI,587
+ clarifai/constants/dataset.py,sha256=vjK3IlgXu31HycuvjRSzEQSqhU6xfj5TIgo6IpyUWoc,609
  clarifai/constants/input.py,sha256=WcHwToUVIK9ItAhDefaSohQHCLNeR55PSjZ0BFnoZ3U,28
  clarifai/constants/model.py,sha256=Um1hLfMFlh5R_vtP3Z6P-o6zon-tdbLcKVIl4PucrV4,438
  clarifai/constants/rag.py,sha256=WcHwToUVIK9ItAhDefaSohQHCLNeR55PSjZ0BFnoZ3U,28
@@ -27,10 +27,11 @@ clarifai/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU
  clarifai/datasets/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/datasets/export/inputs_annotations.py,sha256=7c6HWdATI4aPCRoCPZetUBNNEz9dBhbyYX1QqX-xYe4,9744
  clarifai/datasets/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- clarifai/datasets/upload/base.py,sha256=IP4sdBRfThk2l0W1rDWciFrAJnKwVsM-gu4zEslJ2_E,2198
- clarifai/datasets/upload/features.py,sha256=oq0PGpAw8LEafiSkdMMl0yn-NJeZ7K_CKzpJ71b0H40,1731
- clarifai/datasets/upload/image.py,sha256=pNFTThEVGIK9RNIsUuSSQE59LfO-tKkhsXSR7CONAEg,8293
- clarifai/datasets/upload/text.py,sha256=KM_LupAqYYxmbl1Q5nTsr3eV1RAYDfr-2xzf4cP6rfY,2083
+ clarifai/datasets/upload/base.py,sha256=UIc0ufyIBCrb83_sFpv21L8FshsX4nwsLYQkdlJfzD4,2357
+ clarifai/datasets/upload/features.py,sha256=jv2x7jGZKS-LMt87sEZNBwwOskHbP26XTMjoiaSA5pg,2024
+ clarifai/datasets/upload/image.py,sha256=HlCsfEMu_C4GVecGSv52RUJ6laLW8H64Pfj_FQyX6qg,8580
+ clarifai/datasets/upload/multimodal.py,sha256=2_s4SCotzacVMdnqkM7upSR3Ovxh3b9oW1hSWhKyxO4,2373
+ clarifai/datasets/upload/text.py,sha256=boVJenfQZKf79aXu8CEP4g_ANzX5ROdd06g07O7RnXU,2198
  clarifai/datasets/upload/utils.py,sha256=h7mtN9FZXhQQbf47EXczgb-NTY2uOE9AJlE9u4-hDwI,9627
  clarifai/datasets/upload/loaders/README.md,sha256=aNRutSCTzLp2ruIZx74ZkN5AxpzwKOxMa7OzabnKpwg,2980
  clarifai/datasets/upload/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -40,50 +41,6 @@ clarifai/datasets/upload/loaders/imagenet_classification.py,sha256=LuylazxpI5V8f
  clarifai/datasets/upload/loaders/xview_detection.py,sha256=hk8cZdYZimm4KOaZvBjYcC6ikURZMn51xmn7pXZT3HE,6052
  clarifai/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/models/api.py,sha256=d3FQQlG0mNDLrfEvchqaVcq4Tgb_TqryNnJtwp3c7sE,10961
- clarifai/models/model_serving/README.md,sha256=zXnKybVoIF_LYHKKY2vijTCaGcb2-GJ5kef2uB1WFrs,4241
- clarifai/models/model_serving/__init__.py,sha256=78fiK9LvdGvpMxICmZWqSIyS6BFATjW2s5R6_GgtbPA,645
- clarifai/models/model_serving/constants.py,sha256=8eVT6iYGRF4s4SKoHFUmDwCX6C2EiEiCOP1MIgtcc6A,692
- clarifai/models/model_serving/utils.py,sha256=vdFLyxvcDJhgFdw2jaOsVNVmLlw87ymGTgQachVleOU,1089
- clarifai/models/model_serving/cli/__init__.py,sha256=Nls28G-fedNw2oQZIkPQSN__TgjJXbG9RDzzuHIM0VI,575
- clarifai/models/model_serving/cli/_utils.py,sha256=CZTKKiaoO1Mg5MKQS2Qhgy4JRjnkEHqy8zY5U6b6C0w,1734
- clarifai/models/model_serving/cli/base.py,sha256=k4ARNU1koNzGAi9ach6Vpk7hpISZySiYHyKjkBLuHLg,283
- clarifai/models/model_serving/cli/build.py,sha256=-C4PBt-9xO9YsyUagz3kF4J0_PsYb6YVKFY8y-VmY5I,2786
- clarifai/models/model_serving/cli/clarifai_clis.py,sha256=sGDDj7MrlU3goWLQm4H9dCf4lPD2Ojx50_jdIoxb5QM,663
- clarifai/models/model_serving/cli/create.py,sha256=wtKcVi8XSPN-Fx0RrSUxEwH1hm5TbZ_FrCEMIS9yszM,5598
- clarifai/models/model_serving/cli/example_cli.py,sha256=tCm0J4EI0kuuSRhEiPTuraSA-bUYwtEFEHcL1eOXzRI,1039
- clarifai/models/model_serving/cli/login.py,sha256=IQHL3SdERThnCTGPp5HnI41B0-BTzzGb2wx0P8-KIIA,771
- clarifai/models/model_serving/cli/upload.py,sha256=kOz8OOEobo6sLUkS1xg0672PTmMkx0aWxjKMhSRlMwM,7013
- clarifai/models/model_serving/docs/cli.md,sha256=fLgyY8sYMPjYQW_q8Q9yJYB_ryDVGbzj2VouJgvkEFw,4564
- clarifai/models/model_serving/docs/concepts.md,sha256=ppQADibKQInf9JpfcH7wIpcMndTZ3618or5yzMhGNOE,9376
- clarifai/models/model_serving/docs/dependencies.md,sha256=apwg_IxDBzovtQYXRpWMU9pUqdf0VaS10yMVOYYXhoc,728
- clarifai/models/model_serving/docs/inference_parameters.md,sha256=EFBQs3OGQNH512zoLJKMfFD6WXE_Tzt_Uvts877VvpQ,4111
- clarifai/models/model_serving/docs/model_types.md,sha256=3sALugeBTMspEnlPNWXI8xtWCxjMDQYjrAji_jgqHVo,1013
- clarifai/models/model_serving/model_config/__init__.py,sha256=MLnCl4U2UlL8hkvKbKifFX2nKRjVN63687-gxiKf8g4,734
- clarifai/models/model_serving/model_config/base.py,sha256=Jow6cFvREtWRaaXw1hobWJks0uYsOi9oL973ZPEfIkk,14636
- clarifai/models/model_serving/model_config/config.py,sha256=EWkPcui370QEYJAjlzuLupLlaZF2BgFbK0Jhx_JDHnk,10188
- clarifai/models/model_serving/model_config/inference_parameter.py,sha256=fDPRkwsntaGZWQWOiCW8x0tcyHPeSCYZwBZoZb2oBzw,3924
- clarifai/models/model_serving/model_config/output.py,sha256=uyXY-B9mmoe8lizTpYEBRYI1KDNQh3ihEiEB4Ne65uc,4634
- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml,sha256=4wFQ2R8PiJrXR_8AEgUDD-22gY9sK93y9r68mSOOVnw,541
- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml,sha256=0hicyQM-R2Za62RaBexdNCkHBDdacwMRVAL8Yk_sVzs,421
- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml,sha256=MEnVsO3-SAOFSW7-b0BOSxgUNxdhXfmE98hXstBt104,395
- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml,sha256=FPO9ic0R_mcFa3nIGon9z3negy1q6LsPRNmJ-wqGhyw,383
- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml,sha256=7u_0kdiR2iEuXTKHtErUzZZ8ghUdep-RuWmJd9i8BdY,371
- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml,sha256=UDq-VtnnnhuI7NCJOYM19kFvcMS0aOvDDMSblPk5iYY,468
- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml,sha256=cJsalUTzXclXpgzH9CutpWQqseJNg9FrI7WjU3wpfuQ,852
- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml,sha256=OQYdrY81rD3WNooHRkOiQASvL3XfGG9GGzT61jEsrT8,406
- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml,sha256=mQLwA1JnnvWZwp26lVxzwfhp1GS7hH9yHh7mtOUt9rY,474
- clarifai/models/model_serving/model_config/triton/__init__.py,sha256=uJUjpRauhVp6_9sN5DRQi7bwIKEtHPKyQqcCVj6Aj2g,719
- clarifai/models/model_serving/model_config/triton/serializer.py,sha256=eYwXfaJkeXMaiQq_EDG4vWOCc1CKfnC_U6dSp2Urak0,4278
- clarifai/models/model_serving/model_config/triton/triton_config.py,sha256=mDZafUByvEgM1vd0QZL8nM-cOCqeR-06iOC2T6x8hr4,4696
- clarifai/models/model_serving/model_config/triton/wrappers.py,sha256=-O8t2AEJXvqJlUNtKtr8CUlxLjheV2GfBtM0sB_B1v0,8660
- clarifai/models/model_serving/repo_build/__init__.py,sha256=jFb0RNG4Jh63TH35_Urv0EyNXVMW8FEC2NVHXhlbvqg,673
- clarifai/models/model_serving/repo_build/build.py,sha256=IlJTjt5YI1alAGv1Fw3kPZeh3yqi45R20rKbWN9vV1s,7195
- clarifai/models/model_serving/repo_build/static_files/_requirements.txt,sha256=lIXMfxC4BP6QA5hraObPOwUS3PK9F2mA0Gf8KvlijQE,34
- clarifai/models/model_serving/repo_build/static_files/base_test.py,sha256=Ne5H5JKTQTBuHYl_0g_d5JmUxW7gSECp9te7W1zcVIc,6785
- clarifai/models/model_serving/repo_build/static_files/inference.py,sha256=TejkXZw43mcZD-M9TkfuqMuABz_cliJgf53_Teodtf0,721
- clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml,sha256=VOFSSb7D_CgRRcqi-plaCH-6hoFO8NAGDNXVSOJGylo,678
- clarifai/models/model_serving/repo_build/static_files/test.py,sha256=GunBqWgTyo0aF5W9ckKz55tGS-wkL9S9TRfytIjB7Eo,1505
- clarifai/models/model_serving/repo_build/static_files/triton/model.py,sha256=l9lkwyeXw9H_K4Om9dGcuylnj4hAlzohspUZkSnQ7Qg,2429
  clarifai/modules/README.md,sha256=mx8pVx6cPp-pP4LcFPT_nX3ngGmhygVK0WiXeD3cbIo,367
  clarifai/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/modules/css.py,sha256=kadCEunmyh5h2yf0-4aysE3ZcZ6qaQcxuAgDXS96yF8,2020
@@ -113,7 +70,7 @@ clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
  clarifai/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  clarifai/utils/constants.py,sha256=MG_iHnSwNEyUZOpvsrTicNwaT4CIjmlK_Ixk_qqEX8g,142
  clarifai/utils/logging.py,sha256=_nzRyXu3fUa_5X8vXig5dV392YkNESDTc3ZoiwVOGZY,11359
- clarifai/utils/misc.py,sha256=WV3KGM5_MwHySVthjUK4O93x6F_kE1h3-xT4zE4EvnU,2150
+ clarifai/utils/misc.py,sha256=ptjt1NtteDT0EhrPoyQ7mgWtvoAQ-XNncQaZvNHb0KI,2253
  clarifai/utils/model_train.py,sha256=Mndqy5GNu7kjQHjDyNVyamL0hQFLGSHcWhOuPyOvr1w,8005
  clarifai/utils/evaluation/__init__.py,sha256=PYkurUrXrGevByj7RFb6CoU1iC7fllyQSfnnlo9WnY8,69
  clarifai/utils/evaluation/helpers.py,sha256=d_dcASRI_lhsHIRukAF1S-w7XazLpK9y6E_ug3l50t4,18440
@@ -123,9 +80,9 @@ clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
  clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
  clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
  clarifai/workflows/validate.py,sha256=yJq03MaJqi5AK3alKGJJBR89xmmjAQ31sVufJUiOqY8,2556
- clarifai-10.8.4.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
- clarifai-10.8.4.dist-info/METADATA,sha256=hkbzljAgU7J6ey53Lk-70304H-VlkHBD89z68FvJK4Y,19446
- clarifai-10.8.4.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
- clarifai-10.8.4.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
- clarifai-10.8.4.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
- clarifai-10.8.4.dist-info/RECORD,,
+ clarifai-10.8.5.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+ clarifai-10.8.5.dist-info/METADATA,sha256=4qCot6nQ161_C99PPYSfhBS0OoyeffeT_g-dOvH_cIU,19479
+ clarifai-10.8.5.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ clarifai-10.8.5.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
+ clarifai-10.8.5.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+ clarifai-10.8.5.dist-info/RECORD,,
clarifai/models/model_serving/README.md DELETED
@@ -1,158 +0,0 @@
- # Clarifai Model Serving
-
- ## Overview
-
- Model Serving is a part of user journey at Clarifai offers a user-friendly interface for deploying your local model into production with Clarifai, featuring:
-
- * A convenient command-line interface (CLI)
- * Easy implementation and testing in Python
- * No need for MLops expertise.
-
- ## Quickstart Guide
-
- Quick example for deploying a `text-to-text` model
-
- ### Initialize a Clarifai model repository
-
- Suppose your working directory name is `your_model_dir`. Then run
-
- ```bash
- $ clarifai create model --type text-to-text --working-dir your_model_dir
- $ cd your_model_dir
- ```
-
- In `your_model_dir` folder you will see essential files for deployment process
-
- ```bash
- your_model_dir
- ├── clarifai_config.yaml
- ├── inference.py
- ├── test.py
- └── requirements.txt
- ```
-
- ### Implementation
-
- Write your code in class `InferenceModel` which is an interface between your model and Clarifai server in `inference.py`, there are 2 functions you must implement:
-
- * `__init__`: load your model checkpoint once.
- * `predict`: make prediction, called everytime when you make request from API.
-
- For example, a complete implementation of a hf text-generation model
-
- ```python
- import os
- from typing import Dict, Union
- from clarifai.models.model_serving.model_config import *
-
- import torch
- from transformers import AutoTokenizer
- import transformers
-
- class InferenceModel(TextToText):
- """User model inference class."""
-
- def __init__(self) -> None:
- """
- Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
- in this method so they are loaded only once for faster inference.
- """
- # current directory
- self.base_path = os.path.dirname(__file__)
- # where you save hf checkpoint in your working dir e.i. `your_model_dir`
- model_path = os.path.join(self.base_path, "checkpoint")
- self.tokenizer = AutoTokenizer.from_pretrained(model_path)
- self.pipeline = transformers.pipeline(
- "text-generation",
- model=model_path,
- torch_dtype=torch.float16,
- device_map="auto",
- )
-
- def predict(self, input_data: list,
- inference_parameters: Dict[str, Union[str, float, int]]) -> list:
- """ Custom prediction function for `text-to-text` (also called as `text generation`) model.
-
- Args:
- input_data (List[str]): List of text
- inference_parameters (Dict[str, Union[str, float, int]]): your inference parameters
-
- Returns:
- list of TextOutput
-
- """
- output_sequences = self.pipeline(
- input_data,
- eos_token_id=self.tokenizer.eos_token_id,
- **inference_parameters)
-
- # wrap outputs in Clarifai defined output
- return [TextOutput(each[0]) for each in output_sequences]
- ```
-
- Update dependencies in `requirements.txt`
-
- ```
- clarifai
- torch=2.1.1
- transformers==4.36.2
- accelerate==0.26.1
- ```
-
- ### Test (optional)
-
- > NOTE: Running `test` is also involved in `build` and `upload` command.
-
- Test and play with your implementation by executing `test.py`.
-
- Install pytest
-
- ```bash
- $ pip install pytest
- ```
-
- Execute test
-
- ```bash
- $ pytest test.py
- ```
-
- ### Build
-
- Prepare for deployment step. Run:
-
- ```bash
- $ clarifai build model
- ```
-
- You will obtain `*.clarifai` file, it's simply a zip having all nessecary files in it to get your model work on Clarifai platform.
-
- `NOTE`: you need to upload your built file to cloud storage to get direct download `url` for next step
-
- ### Deployment
-
- Login to Clarifai
-
- ```bash
- $ clarifai login
- Get your PAT from https://clarifai.com/settings/security and pass it here: <insert your pat here>
- ```
-
- Upload
-
- ```bash
- # upload built file directly
- $ clarifai upload model <your-working-dir> --user-app <your_user_id>/<your_app_id> --id <your_model_id>
- # or using direct download url of cloud storage
- $ clarifai upload model --url <url> --user-app <your_user_id>/<your_app_id> --id <your_model_id>
- ```
-
- ## Learn More
-
- * [Detail Instruction](./docs/concepts.md)
- * [Examples](https://github.com/Clarifai/examples/tree/main/model_upload)
- * [Initialize from example](./docs/cli.md)
- * [CLI usage](./docs/cli.md)
- * [Inference parameters](./docs/inference_parameters.md)
- * [Model Types](./docs/model_types.md)
- * [Dependencies](./docs/dependencies.md)
clarifai/models/model_serving/__init__.py DELETED
@@ -1,14 +0,0 @@
- # Copyright 2023 Clarifai, Inc.
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
- from .model_config import * # noqa
- from .repo_build import * # noqa
clarifai/models/model_serving/cli/__init__.py DELETED
@@ -1,12 +0,0 @@
- # Copyright 2023 Clarifai, Inc.
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- # http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.