clarifai 10.5.3__py3-none-any.whl → 10.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -0
- clarifai/client/app.py +26 -0
- clarifai/client/base.py +5 -0
- clarifai/client/input.py +116 -45
- clarifai/client/search.py +28 -42
- clarifai/client/user.py +33 -0
- clarifai/constants/dataset.py +2 -0
- clarifai/datasets/export/inputs_annotations.py +6 -2
- clarifai/rag/rag.py +8 -0
- clarifai/utils/constants.py +1 -0
- clarifai/utils/logging.py +3 -1
- clarifai/versions.py +2 -1
- {clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/METADATA +2 -2
- {clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/RECORD +18 -91
- {clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
- clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
- clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/runners/deepgram_live_transcribe.py +0 -98
- clarifai/runners/deepgram_live_transcribe.py~ +0 -98
- clarifai/runners/deepgram_runner.py +0 -131
- clarifai/runners/deepgram_runner.py~ +0 -130
- clarifai/runners/example_llama2.py~ +0 -72
- clarifai/runners/matt_example.py +0 -89
- clarifai/runners/matt_example.py~ +0 -87
- clarifai/runners/matt_llm_example.py +0 -129
- clarifai/runners/matt_llm_example.py~ +0 -128
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- {clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/LICENSE +0 -0
- {clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/entry_points.txt +0 -0
- {clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/top_level.txt +0 -0
clarifai/__init__.py
CHANGED
@@ -0,0 +1 @@
|
|
1
|
+
__version__ = "10.7.0"
|
clarifai/client/app.py
CHANGED
@@ -592,6 +592,32 @@ class App(Lister, BaseClient):
|
|
592
592
|
"""
|
593
593
|
return Inputs.from_auth_helper(self.auth_helper)
|
594
594
|
|
595
|
+
def patch_dataset(self, dataset_id: str, action: str = 'merge', **kwargs) -> Dataset:
|
596
|
+
"""Patches a dataset for the app.
|
597
|
+
|
598
|
+
Args:
|
599
|
+
dataset_id (str): The dataset ID for the dataset to create.
|
600
|
+
action (str): The action to perform on the dataset (merge/overwrite/remove).
|
601
|
+
**kwargs: Additional keyword arguments to be passed to patch the Dataset.
|
602
|
+
|
603
|
+
Returns:
|
604
|
+
Dataset: A Dataset object for the specified dataset ID.
|
605
|
+
"""
|
606
|
+
if "visibility" in kwargs:
|
607
|
+
kwargs["visibility"] = resources_pb2.Visibility(gettable=kwargs["visibility"])
|
608
|
+
if "image_url" in kwargs:
|
609
|
+
kwargs["image"] = resources_pb2.Image(url=kwargs.pop("image_url"))
|
610
|
+
request = service_pb2.PatchDatasetsRequest(
|
611
|
+
user_app_id=self.user_app_id,
|
612
|
+
datasets=[resources_pb2.Dataset(id=dataset_id, **kwargs)],
|
613
|
+
action=action)
|
614
|
+
response = self._grpc_request(self.STUB.PatchDatasets, request)
|
615
|
+
if response.status.code != status_code_pb2.SUCCESS:
|
616
|
+
raise Exception(response.status)
|
617
|
+
self.logger.info("\nDataset patched\n%s", response.status)
|
618
|
+
|
619
|
+
return Dataset.from_auth_helper(self.auth_helper, dataset_id=dataset_id, **kwargs)
|
620
|
+
|
595
621
|
def delete_dataset(self, dataset_id: str) -> None:
|
596
622
|
"""Deletes an dataset for the user.
|
597
623
|
|
clarifai/client/base.py
CHANGED
@@ -1,6 +1,7 @@
|
|
1
1
|
from datetime import datetime
|
2
2
|
from typing import Any, Callable
|
3
3
|
|
4
|
+
from clarifai_grpc.grpc.api import resources_pb2
|
4
5
|
from google.protobuf import struct_pb2
|
5
6
|
from google.protobuf.timestamp_pb2 import Timestamp
|
6
7
|
from google.protobuf.wrappers_pb2 import BoolValue
|
@@ -176,6 +177,10 @@ class BaseClient:
|
|
176
177
|
continue
|
177
178
|
elif key == 'size':
|
178
179
|
value = int(value)
|
180
|
+
elif key == 'image_info':
|
181
|
+
value = resources_pb2.ImageInfo(**value)
|
182
|
+
elif key == 'hosted_image_info':
|
183
|
+
continue
|
179
184
|
elif key in ['metadata']:
|
180
185
|
if isinstance(value, dict) and value != {}:
|
181
186
|
value_s = struct_pb2.Struct()
|
clarifai/client/input.py
CHANGED
@@ -65,7 +65,7 @@ class Inputs(Lister, BaseClient):
|
|
65
65
|
|
66
66
|
@staticmethod
|
67
67
|
def _get_proto(input_id: str,
|
68
|
-
dataset_id:
|
68
|
+
dataset_id: str = None,
|
69
69
|
imagepb: Image = None,
|
70
70
|
video_pb: Video = None,
|
71
71
|
audio_pb: Audio = None,
|
@@ -481,7 +481,11 @@ class Inputs(Lister, BaseClient):
|
|
481
481
|
return input_protos
|
482
482
|
|
483
483
|
@staticmethod
|
484
|
-
def get_bbox_proto(input_id: str,
|
484
|
+
def get_bbox_proto(input_id: str,
|
485
|
+
label: str,
|
486
|
+
bbox: List,
|
487
|
+
label_id: str = None,
|
488
|
+
annot_id: str = None) -> Annotation:
|
485
489
|
"""Create an annotation proto for each bounding box, label input pair.
|
486
490
|
|
487
491
|
Args:
|
@@ -489,6 +493,7 @@ class Inputs(Lister, BaseClient):
|
|
489
493
|
label (str): annotation label name
|
490
494
|
bbox (List): a list of a single bbox's coordinates. # bbox ordering: [xmin, ymin, xmax, ymax]
|
491
495
|
label_id (str): annotation label ID
|
496
|
+
annot_id (str): annotation ID
|
492
497
|
|
493
498
|
Returns:
|
494
499
|
An annotation object for the specified input ID.
|
@@ -499,31 +504,35 @@ class Inputs(Lister, BaseClient):
|
|
499
504
|
"""
|
500
505
|
if not isinstance(bbox, list):
|
501
506
|
raise UserError("must be a list of bbox cooridnates")
|
502
|
-
|
503
|
-
|
504
|
-
|
505
|
-
|
506
|
-
|
507
|
-
|
508
|
-
|
509
|
-
|
510
|
-
|
511
|
-
|
512
|
-
|
513
|
-
|
514
|
-
)),
|
515
|
-
|
516
|
-
|
517
|
-
|
518
|
-
|
519
|
-
|
520
|
-
|
507
|
+
annot_data = resources_pb2.Data(regions=[
|
508
|
+
resources_pb2.Region(
|
509
|
+
region_info=resources_pb2.RegionInfo(bounding_box=resources_pb2.BoundingBox(
|
510
|
+
# bbox ordering: [xmin, ymin, xmax, ymax]
|
511
|
+
# top_row must be less than bottom row
|
512
|
+
# left_col must be less than right col
|
513
|
+
top_row=bbox[1], #y_min
|
514
|
+
left_col=bbox[0], #x_min
|
515
|
+
bottom_row=bbox[3], #y_max
|
516
|
+
right_col=bbox[2] #x_max
|
517
|
+
)),
|
518
|
+
data=resources_pb2.Data(concepts=[
|
519
|
+
resources_pb2.Concept(id=f"id-{''.join(label.split(' '))}", name=label, value=1.)
|
520
|
+
if not label_id else resources_pb2.Concept(id=label_id, name=label, value=1.)
|
521
|
+
]))
|
522
|
+
])
|
523
|
+
if annot_id:
|
524
|
+
input_annot_proto = resources_pb2.Annotation(id=annot_id, input_id=input_id, data=annot_data)
|
525
|
+
else:
|
526
|
+
input_annot_proto = resources_pb2.Annotation(input_id=input_id, data=annot_data)
|
521
527
|
|
522
528
|
return input_annot_proto
|
523
529
|
|
524
530
|
@staticmethod
|
525
|
-
def get_mask_proto(input_id: str,
|
526
|
-
|
531
|
+
def get_mask_proto(input_id: str,
|
532
|
+
label: str,
|
533
|
+
polygons: List[List[float]],
|
534
|
+
label_id: str = None,
|
535
|
+
annot_id: str = None) -> Annotation:
|
527
536
|
"""Create an annotation proto for each polygon box, label input pair.
|
528
537
|
|
529
538
|
Args:
|
@@ -531,6 +540,7 @@ class Inputs(Lister, BaseClient):
|
|
531
540
|
label (str): annotation label name
|
532
541
|
polygons (List): Polygon x,y points iterable
|
533
542
|
label_id (str): annotation label ID
|
543
|
+
annot_id (str): annotation ID
|
534
544
|
|
535
545
|
Returns:
|
536
546
|
An annotation object for the specified input ID.
|
@@ -541,23 +551,24 @@ class Inputs(Lister, BaseClient):
|
|
541
551
|
"""
|
542
552
|
if not isinstance(polygons, list):
|
543
553
|
raise UserError("polygons must be a list of points")
|
544
|
-
|
545
|
-
|
546
|
-
|
547
|
-
|
548
|
-
|
549
|
-
|
550
|
-
|
551
|
-
|
552
|
-
|
553
|
-
|
554
|
-
|
555
|
-
|
556
|
-
|
557
|
-
|
558
|
-
|
559
|
-
|
560
|
-
|
554
|
+
annot_data = resources_pb2.Data(regions=[
|
555
|
+
resources_pb2.Region(
|
556
|
+
region_info=resources_pb2.RegionInfo(polygon=resources_pb2.Polygon(
|
557
|
+
points=[
|
558
|
+
resources_pb2.Point(
|
559
|
+
row=_point[1], # row is y point
|
560
|
+
col=_point[0], # col is x point
|
561
|
+
visibility="VISIBLE") for _point in polygons
|
562
|
+
])),
|
563
|
+
data=resources_pb2.Data(concepts=[
|
564
|
+
resources_pb2.Concept(id=f"id-{''.join(label.split(' '))}", name=label, value=1.)
|
565
|
+
if not label_id else resources_pb2.Concept(id=label_id, name=label, value=1.)
|
566
|
+
]))
|
567
|
+
])
|
568
|
+
if annot_id:
|
569
|
+
input_mask_proto = resources_pb2.Annotation(id=annot_id, input_id=input_id, data=annot_data)
|
570
|
+
else:
|
571
|
+
input_mask_proto = resources_pb2.Annotation(input_id=input_id, data=annot_data)
|
561
572
|
|
562
573
|
return input_mask_proto
|
563
574
|
|
@@ -707,7 +718,7 @@ class Inputs(Lister, BaseClient):
|
|
707
718
|
|
708
719
|
return input_job_id, response
|
709
720
|
|
710
|
-
def patch_inputs(self, inputs: List[Input], action: str = 'merge') ->
|
721
|
+
def patch_inputs(self, inputs: List[Input], action: str = 'merge') -> None:
|
711
722
|
"""Patch list of input objects to the app.
|
712
723
|
|
713
724
|
Args:
|
@@ -719,7 +730,6 @@ class Inputs(Lister, BaseClient):
|
|
719
730
|
"""
|
720
731
|
if not isinstance(inputs, list):
|
721
732
|
raise UserError("inputs must be a list of Input objects")
|
722
|
-
uuid.uuid4().hex # generate a unique id for this job
|
723
733
|
request = service_pb2.PatchInputsRequest(
|
724
734
|
user_app_id=self.user_app_id, inputs=inputs, action=action)
|
725
735
|
response = self._grpc_request(self.STUB.PatchInputs, request)
|
@@ -727,9 +737,9 @@ class Inputs(Lister, BaseClient):
|
|
727
737
|
try:
|
728
738
|
self.logger.warning(f"Patch inputs failed, status: {response.annotations[0].status}")
|
729
739
|
except Exception:
|
730
|
-
self.logger.warning(f"Patch inputs failed, status: {response.status
|
731
|
-
|
732
|
-
|
740
|
+
self.logger.warning(f"Patch inputs failed, status: {response.status}")
|
741
|
+
else:
|
742
|
+
self.logger.info("\nPatch Inputs Successful\n%s", response.status)
|
733
743
|
|
734
744
|
def upload_annotations(self, batch_annot: List[resources_pb2.Annotation], show_log: bool = True
|
735
745
|
) -> Union[List[resources_pb2.Annotation], List[None]]:
|
@@ -761,6 +771,67 @@ class Inputs(Lister, BaseClient):
|
|
761
771
|
|
762
772
|
return retry_upload
|
763
773
|
|
774
|
+
def patch_annotations(self, batch_annot: List[resources_pb2.Annotation],
|
775
|
+
action: str = 'merge') -> None:
|
776
|
+
"""Patch image annotations to app.
|
777
|
+
|
778
|
+
Args:
|
779
|
+
batch_annot: annot batch protos
|
780
|
+
action (str): Action to perform on the input. Options: 'merge', 'overwrite', 'remove'.
|
781
|
+
|
782
|
+
"""
|
783
|
+
if not isinstance(batch_annot, list):
|
784
|
+
raise UserError("batch_annot must be a list of Annotation objects")
|
785
|
+
request = service_pb2.PatchAnnotationsRequest(
|
786
|
+
user_app_id=self.user_app_id, annotations=batch_annot, action=action)
|
787
|
+
response = self._grpc_request(self.STUB.PatchAnnotations, request)
|
788
|
+
response_dict = MessageToDict(response)
|
789
|
+
if response.status.code != status_code_pb2.SUCCESS:
|
790
|
+
try:
|
791
|
+
for annot in response_dict["annotations"]:
|
792
|
+
if annot['status']['code'] != status_code_pb2.ANNOTATION_SUCCESS:
|
793
|
+
self.logger.warning(f"Patch annotations failed, status: {annot['status']}")
|
794
|
+
except Exception:
|
795
|
+
self.logger.warning(f"Patch annotations failed due to {response.status}")
|
796
|
+
else:
|
797
|
+
self.logger.info("\nPatch Annotations Uploaded Successful\n%s", response.status)
|
798
|
+
|
799
|
+
def patch_concepts(self,
|
800
|
+
concept_ids: List[str],
|
801
|
+
labels: List[str] = [],
|
802
|
+
values: List[float] = [],
|
803
|
+
action: str = 'overwrite') -> None:
|
804
|
+
"""Patch concepts to app.
|
805
|
+
|
806
|
+
Args:
|
807
|
+
concept_ids: A list of concept
|
808
|
+
labels: A list of label names
|
809
|
+
values: concept value
|
810
|
+
action (str): Action to perform on the input. Options: 'overwrite'.
|
811
|
+
|
812
|
+
"""
|
813
|
+
if not labels:
|
814
|
+
labels = list(concept_ids)
|
815
|
+
if values:
|
816
|
+
concepts=[
|
817
|
+
resources_pb2.Concept(
|
818
|
+
id=concept_id, name=label, value=value)\
|
819
|
+
for concept_id, label, value in zip(concept_ids, labels, values)
|
820
|
+
]
|
821
|
+
else:
|
822
|
+
concepts=[
|
823
|
+
resources_pb2.Concept(
|
824
|
+
id=concept_id, name=label, value=1.)\
|
825
|
+
for concept_id, label in zip(concept_ids, labels)
|
826
|
+
]
|
827
|
+
request = service_pb2.PatchConceptsRequest(
|
828
|
+
user_app_id=self.user_app_id, concepts=concepts, action=action)
|
829
|
+
response = self._grpc_request(self.STUB.PatchConcepts, request)
|
830
|
+
if response.status.code != status_code_pb2.SUCCESS:
|
831
|
+
self.logger.warning(f"Patch Concepts failed, status: {response.status.details}")
|
832
|
+
else:
|
833
|
+
self.logger.info("\nPatch Concepts Successful\n%s", response.status)
|
834
|
+
|
764
835
|
def _upload_batch(self, inputs: List[Input]) -> List[Input]:
|
765
836
|
"""Upload a batch of input objects to the app.
|
766
837
|
|
clarifai/client/search.py
CHANGED
@@ -277,7 +277,8 @@ class Search(Lister, BaseClient):
|
|
277
277
|
Get successful inputs of type image or text
|
278
278
|
>>> from clarifai.client.search import Search
|
279
279
|
>>> search = Search(user_id='user_id', app_id='app_id', top_k=10, metric='cosine')
|
280
|
-
|
280
|
+
# This performs OR operation on input_types and input_status_code
|
281
|
+
>>> res = search.query(filters=[{'input_types': ['image', 'text'], 'input_status_code': 30000}])
|
281
282
|
|
282
283
|
Vector search over inputs
|
283
284
|
>>> from clarifai.client.search import Search
|
@@ -298,43 +299,27 @@ class Search(Lister, BaseClient):
|
|
298
299
|
raise UserError(f"Invalid rank or filter input: {err}")
|
299
300
|
|
300
301
|
# For each rank, create a Rank proto message
|
301
|
-
|
302
|
+
# For ranks it only allows resources_pb2.Annotation proto, so no need of splitting protos into annotation and input.
|
303
|
+
all_ranks = []
|
302
304
|
for rank_dict in ranks:
|
303
|
-
rank_annot_proto
|
304
|
-
|
305
|
-
|
306
|
-
|
307
|
-
|
308
|
-
|
309
|
-
|
310
|
-
|
311
|
-
|
312
|
-
|
313
|
-
|
314
|
-
|
315
|
-
|
316
|
-
|
317
|
-
|
318
|
-
|
319
|
-
|
320
|
-
|
321
|
-
])
|
322
|
-
if self.pagination:
|
323
|
-
return self._list_all_pages_generator(self.STUB.PostInputsSearches,
|
324
|
-
service_pb2.PostInputsSearchesRequest, request_data,
|
325
|
-
page_no, per_page)
|
326
|
-
return self._list_topk_generator(self.STUB.PostInputsSearches,
|
327
|
-
service_pb2.PostInputsSearchesRequest, request_data)
|
328
|
-
|
329
|
-
# Calls PostAnnotationsSearches for annotation ranks, filters
|
330
|
-
filters_annot_proto = []
|
331
|
-
for filter_dict in filters:
|
332
|
-
filters_annot_proto.append(self._get_annot_proto(**filter_dict))
|
333
|
-
|
334
|
-
all_filters = [
|
335
|
-
resources_pb2.Filter(annotation=filter_annot) for filter_annot in filters_annot_proto
|
336
|
-
]
|
337
|
-
|
305
|
+
rank_annot_proto = self._get_annot_proto(**rank_dict)
|
306
|
+
all_ranks.append(resources_pb2.Rank(annotation=rank_annot_proto))
|
307
|
+
|
308
|
+
all_filters = []
|
309
|
+
# check for filters which is compatible with input proto
|
310
|
+
for each_filter in filters:
|
311
|
+
input_dict = {
|
312
|
+
key: each_filter.pop(key)
|
313
|
+
for key in ['input_types', 'input_dataset_ids', 'input_status_code']
|
314
|
+
if key in each_filter
|
315
|
+
}
|
316
|
+
|
317
|
+
all_filters.append(
|
318
|
+
resources_pb2.Filter(
|
319
|
+
annotation=self._get_annot_proto(**each_filter),
|
320
|
+
input=self._get_input_proto(**input_dict)))
|
321
|
+
|
322
|
+
# Create a PostInputsSearchesRequest proto message
|
338
323
|
request_data = dict(
|
339
324
|
user_app_id=self.user_app_id,
|
340
325
|
searches=[
|
@@ -343,9 +328,10 @@ class Search(Lister, BaseClient):
|
|
343
328
|
algorithm=self.algorithm,
|
344
329
|
metric=self.metric_distance)
|
345
330
|
])
|
331
|
+
# Calls PostInputsSearches for annotation ranks, input filters
|
346
332
|
if self.pagination:
|
347
|
-
return self._list_all_pages_generator(self.STUB.
|
348
|
-
service_pb2.
|
349
|
-
|
350
|
-
return self._list_topk_generator(self.STUB.
|
351
|
-
service_pb2.
|
333
|
+
return self._list_all_pages_generator(self.STUB.PostInputsSearches,
|
334
|
+
service_pb2.PostInputsSearchesRequest, request_data,
|
335
|
+
page_no, per_page)
|
336
|
+
return self._list_topk_generator(self.STUB.PostInputsSearches,
|
337
|
+
service_pb2.PostInputsSearchesRequest, request_data)
|
clarifai/client/user.py
CHANGED
@@ -3,6 +3,7 @@ from typing import Any, Dict, Generator, List
|
|
3
3
|
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
|
4
4
|
from clarifai_grpc.grpc.api.status import status_code_pb2
|
5
5
|
from google.protobuf.json_format import MessageToDict
|
6
|
+
from google.protobuf.wrappers_pb2 import BoolValue
|
6
7
|
|
7
8
|
from clarifai.client.app import App
|
8
9
|
from clarifai.client.base import BaseClient
|
@@ -222,6 +223,38 @@ class User(Lister, BaseClient):
|
|
222
223
|
|
223
224
|
return dict(self.auth_helper, check_runner_exists=False, **kwargs)
|
224
225
|
|
226
|
+
def patch_app(self, app_id: str, action: str = 'overwrite', **kwargs) -> App:
|
227
|
+
"""Patch an app for the user.
|
228
|
+
|
229
|
+
Args:
|
230
|
+
app_id (str): The app ID for the app to patch.
|
231
|
+
action (str): The action to perform on the app (overwrite/remove).
|
232
|
+
**kwargs: Additional keyword arguments to be passed to patch the App.
|
233
|
+
|
234
|
+
Returns:
|
235
|
+
App: Patched App object for the specified app ID.
|
236
|
+
"""
|
237
|
+
if "base_workflow" in kwargs:
|
238
|
+
kwargs["default_workflow"] = resources_pb2.Workflow(
|
239
|
+
id=kwargs.pop("base_workflow"), app_id="main", user_id="clarifai")
|
240
|
+
if "visibility" in kwargs:
|
241
|
+
kwargs["visibility"] = resources_pb2.Visibility(gettable=kwargs["visibility"])
|
242
|
+
if "image_url" in kwargs:
|
243
|
+
kwargs["image"] = resources_pb2.Image(url=kwargs.pop("image_url"))
|
244
|
+
if "is_template" in kwargs:
|
245
|
+
kwargs["is_template"] = BoolValue(value=kwargs["is_template"])
|
246
|
+
request = service_pb2.PatchAppRequest(
|
247
|
+
user_app_id=resources_pb2.UserAppIDSet(user_id=self.id, app_id=app_id),
|
248
|
+
app=resources_pb2.App(id=app_id, **kwargs),
|
249
|
+
action=action,
|
250
|
+
reindex=False)
|
251
|
+
response = self._grpc_request(self.STUB.PatchApp, request)
|
252
|
+
if response.status.code != status_code_pb2.SUCCESS:
|
253
|
+
raise Exception(response.status)
|
254
|
+
self.logger.info("\nApp patched\n%s", response.status)
|
255
|
+
|
256
|
+
return App.from_auth_helper(auth=self.auth_helper, app_id=app_id)
|
257
|
+
|
225
258
|
def delete_app(self, app_id: str) -> None:
|
226
259
|
"""Deletes an app for the user.
|
227
260
|
|
clarifai/datasets/export/inputs_annotations.py
CHANGED
@@ -12,6 +12,7 @@ from google.protobuf.json_format import MessageToDict
|
|
12
12
|
from PIL import ImageFile
|
13
13
|
from tqdm import tqdm
|
14
14
|
|
15
|
+
from clarifai.constants.dataset import CONTENT_TYPE
|
15
16
|
from clarifai.errors import UserError
|
16
17
|
from clarifai.utils.logging import get_logger
|
17
18
|
|
@@ -61,8 +62,11 @@ class DatasetExportReader:
|
|
61
62
|
def _download_temp_archive(self, archive_url: str,
|
62
63
|
chunk_size: int = 128) -> tempfile.TemporaryFile:
|
63
64
|
"""Downloads the temp archive of InputBatches."""
|
64
|
-
|
65
|
-
r
|
65
|
+
r = self.session.get(archive_url, stream=True)
|
66
|
+
if r.headers['content-type'] == CONTENT_TYPE['json']:
|
67
|
+
raise Exception("File is a json file :\n {}".format(r.json()))
|
68
|
+
elif r.headers['content-type'] != CONTENT_TYPE['zip']:
|
69
|
+
raise Exception('File is not a zip file')
|
66
70
|
temp_file = tempfile.TemporaryFile()
|
67
71
|
for chunk in r.iter_content(chunk_size=chunk_size):
|
68
72
|
temp_file.write(chunk)
|
clarifai/rag/rag.py
CHANGED
@@ -14,7 +14,9 @@ from clarifai.constants.rag import MAX_UPLOAD_BATCH_SIZE
|
|
14
14
|
from clarifai.errors import UserError
|
15
15
|
from clarifai.rag.utils import (convert_messages_to_str, format_assistant_message, load_documents,
|
16
16
|
split_document)
|
17
|
+
from clarifai.utils.constants import CLARIFAI_USER_ID_ENV_VAR
|
17
18
|
from clarifai.utils.logging import get_logger
|
19
|
+
from clarifai.utils.misc import get_from_dict_or_env
|
18
20
|
|
19
21
|
DEFAULT_RAG_PROMPT_TEMPLATE = "Context information is below:\n{data.hits}\nGiven the context information and not prior knowledge, answer the query.\nQuery: {data.text.raw}\nAnswer: "
|
20
22
|
|
@@ -75,6 +77,12 @@ class RAG:
|
|
75
77
|
>>> rag_agent = RAG.setup(app_url=YOUR_APP_URL)
|
76
78
|
>>> rag_agent.chat(messages=[{"role":"human", "content":"What is Clarifai"}])
|
77
79
|
"""
|
80
|
+
if not app_url:
|
81
|
+
try:
|
82
|
+
user_id = get_from_dict_or_env(key="user_id", env_key=CLARIFAI_USER_ID_ENV_VAR, **kwargs)
|
83
|
+
except Exception:
|
84
|
+
pass
|
85
|
+
|
78
86
|
now_ts = uuid.uuid4().hex[:10]
|
79
87
|
if user_id and not app_url:
|
80
88
|
user = User(user_id=user_id, base_url=base_url, pat=pat)
|
clarifai/utils/constants.py
CHANGED
clarifai/utils/logging.py
CHANGED
@@ -3,6 +3,7 @@ from collections import defaultdict
|
|
3
3
|
from typing import Dict, List, Optional, Union
|
4
4
|
|
5
5
|
from rich import print as rprint
|
6
|
+
from rich.console import Console
|
6
7
|
from rich.logging import RichHandler
|
7
8
|
from rich.table import Table
|
8
9
|
from rich.traceback import install
|
@@ -84,7 +85,8 @@ def _configure_logger(name: str, logger_level: Union[int, str] = logging.NOTSET)
|
|
84
85
|
logger.removeHandler(handler)
|
85
86
|
|
86
87
|
# Add the new rich handler and formatter
|
87
|
-
handler = RichHandler(
|
88
|
+
handler = RichHandler(
|
89
|
+
rich_tracebacks=True, log_time_format="%Y-%m-%d %H:%M:%S", console=Console(width=255))
|
88
90
|
formatter = logging.Formatter('%(name)s: %(message)s')
|
89
91
|
handler.setFormatter(formatter)
|
90
92
|
logger.addHandler(handler)
|
{clarifai-10.5.3.dist-info → clarifai-10.7.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
|
|
1
1
|
Metadata-Version: 2.1
|
2
2
|
Name: clarifai
|
3
|
-
Version: 10.5.3
|
3
|
+
Version: 10.7.0
|
4
4
|
Summary: Clarifai Python SDK
|
5
5
|
Home-page: https://github.com/Clarifai/clarifai-python
|
6
6
|
Author: Clarifai
|
@@ -20,7 +20,7 @@ Classifier: Operating System :: OS Independent
|
|
20
20
|
Requires-Python: >=3.8
|
21
21
|
Description-Content-Type: text/markdown
|
22
22
|
License-File: LICENSE
|
23
|
-
Requires-Dist: clarifai-grpc >=10.
|
23
|
+
Requires-Dist: clarifai-grpc >=10.7.0
|
24
24
|
Requires-Dist: numpy >=1.22.0
|
25
25
|
Requires-Dist: tqdm >=4.65.0
|
26
26
|
Requires-Dist: tritonclient >=2.34.0
|