clarifai 10.5.2.tar.gz → 10.5.4.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {clarifai-10.5.2/clarifai.egg-info → clarifai-10.5.4}/PKG-INFO +1 -1
- clarifai-10.5.4/VERSION +1 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/auth/helper.py +3 -2
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/input.py +116 -45
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/constants/dataset.py +2 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/export/inputs_annotations.py +6 -2
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/login.py +1 -1
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/constants.py +9 -2
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/utils.py +2 -1
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/rag/rag.py +8 -0
- clarifai-10.5.4/clarifai/utils/constants.py +3 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/logging.py +3 -1
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/versions.py +1 -1
- {clarifai-10.5.2 → clarifai-10.5.4/clarifai.egg-info}/PKG-INFO +1 -1
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_app.py +11 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_data_upload.py +47 -1
- clarifai-10.5.2/VERSION +0 -1
- clarifai-10.5.2/clarifai/utils/constants.py +0 -12
- {clarifai-10.5.2 → clarifai-10.5.4}/LICENSE +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/MANIFEST.in +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/README.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/cli.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/app.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/auth/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/auth/register.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/auth/stub.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/base.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/dataset.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/lister.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/model.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/module.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/search.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/user.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/workflow.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/constants/input.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/constants/model.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/constants/rag.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/constants/search.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/constants/workflow.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/export/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/base.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/features.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/image.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/loaders/README.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/loaders/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/loaders/coco_captions.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/loaders/coco_detection.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/loaders/imagenet_classification.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/loaders/xview_detection.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/text.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/upload/utils.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/errors.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/api.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/README.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/_utils.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/base.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/build.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/clarifai_clis.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/create.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/example_cli.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/cli/upload.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/docs/cli.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/docs/concepts.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/docs/dependencies.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/docs/inference_parameters.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/docs/model_types.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/base.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/config.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/inference_parameter.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/output.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/triton/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/triton/serializer.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/triton/triton_config.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/model_config/triton/wrappers.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/build.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/static_files/inference.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/static_files/test.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/modules/README.md +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/modules/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/modules/css.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/modules/pages.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/modules/style.css +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/rag/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/rag/utils.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/schema/search.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/urls/helper.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/evaluation/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/evaluation/helpers.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/evaluation/main.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/evaluation/testset_annotation_parser.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/misc.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/model_train.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/workflows/__init__.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/workflows/export.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/workflows/utils.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai/workflows/validate.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai.egg-info/SOURCES.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai.egg-info/dependency_links.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai.egg-info/entry_points.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai.egg-info/requires.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/clarifai.egg-info/top_level.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/pyproject.toml +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/requirements.txt +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/setup.cfg +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/setup.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_auth.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_eval.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_misc.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_model_predict.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_model_train.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_modules.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_rag.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_search.py +0 -0
- {clarifai-10.5.2 → clarifai-10.5.4}/tests/test_stub.py +0 -0
clarifai-10.5.4/VERSION
ADDED
@@ -0,0 +1 @@
+10.5.4
{clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/auth/helper.py
CHANGED
@@ -5,6 +5,7 @@ from urllib.parse import urlparse

 from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
 from clarifai_grpc.grpc.api import resources_pb2, service_pb2_grpc
+from clarifai.utils.constants import CLARIFAI_PAT_ENV_VAR, CLARIFAI_SESSION_TOKEN_ENV_VAR

 DEFAULT_BASE = "https://api.clarifai.com"
 DEFAULT_UI = "https://clarifai.com"
@@ -237,8 +238,8 @@ Additionally, these optional params are supported:
     """
     user_id = os.environ.get("CLARIFAI_USER_ID", "")
     app_id = os.environ.get("CLARIFAI_APP_ID", "")
-    token = os.environ.get("CLARIFAI_SESSION_TOKEN", "")
-    pat = os.environ.get("CLARIFAI_PAT", "")
+    token = os.environ.get(CLARIFAI_SESSION_TOKEN_ENV_VAR, "")
+    pat = os.environ.get(CLARIFAI_PAT_ENV_VAR, "")
     base = os.environ.get("CLARIFAI_API_BASE", DEFAULT_BASE)
     ui = os.environ.get("CLARIFAI_UI", DEFAULT_UI)
     root_certificates_path = os.environ.get("CLARIFAI_ROOT_CERTIFICATES_PATH", None)
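The change above makes the auth helper read its environment variables through shared constants instead of hard-coded strings. A minimal usage sketch, with placeholder credential values, that references the same constants from client code:

import os

from clarifai.utils.constants import CLARIFAI_PAT_ENV_VAR, CLARIFAI_SESSION_TOKEN_ENV_VAR

# Placeholder value; substitute your own Clarifai personal access token.
os.environ[CLARIFAI_PAT_ENV_VAR] = "my-personal-access-token"
# The session token is optional; the helper falls back to an empty string.
os.environ.setdefault(CLARIFAI_SESSION_TOKEN_ENV_VAR, "")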
{clarifai-10.5.2 → clarifai-10.5.4}/clarifai/client/input.py
CHANGED
@@ -65,7 +65,7 @@ class Inputs(Lister, BaseClient):

   @staticmethod
   def _get_proto(input_id: str,
-                 dataset_id:
+                 dataset_id: str = None,
                  imagepb: Image = None,
                  video_pb: Video = None,
                  audio_pb: Audio = None,
@@ -481,7 +481,11 @@ class Inputs(Lister, BaseClient):
     return input_protos

   @staticmethod
-  def get_bbox_proto(input_id: str,
+  def get_bbox_proto(input_id: str,
+                     label: str,
+                     bbox: List,
+                     label_id: str = None,
+                     annot_id: str = None) -> Annotation:
     """Create an annotation proto for each bounding box, label input pair.

     Args:
@@ -489,6 +493,7 @@ class Inputs(Lister, BaseClient):
       label (str): annotation label name
       bbox (List): a list of a single bbox's coordinates. # bbox ordering: [xmin, ymin, xmax, ymax]
       label_id (str): annotation label ID
+      annot_id (str): annotation ID

     Returns:
       An annotation object for the specified input ID.
@@ -499,31 +504,35 @@ class Inputs(Lister, BaseClient):
     """
     if not isinstance(bbox, list):
       raise UserError("must be a list of bbox cooridnates")
+    annot_data = resources_pb2.Data(regions=[
+        resources_pb2.Region(
+            region_info=resources_pb2.RegionInfo(bounding_box=resources_pb2.BoundingBox(
+                # bbox ordering: [xmin, ymin, xmax, ymax]
+                # top_row must be less than bottom row
+                # left_col must be less than right col
+                top_row=bbox[1],  #y_min
+                left_col=bbox[0],  #x_min
+                bottom_row=bbox[3],  #y_max
+                right_col=bbox[2]  #x_max
+            )),
+            data=resources_pb2.Data(concepts=[
+                resources_pb2.Concept(id=f"id-{''.join(label.split(' '))}", name=label, value=1.)
+                if not label_id else resources_pb2.Concept(id=label_id, name=label, value=1.)
+            ]))
+    ])
+    if annot_id:
+      input_annot_proto = resources_pb2.Annotation(id=annot_id, input_id=input_id, data=annot_data)
+    else:
+      input_annot_proto = resources_pb2.Annotation(input_id=input_id, data=annot_data)

     return input_annot_proto

   @staticmethod
-  def get_mask_proto(input_id: str,
+  def get_mask_proto(input_id: str,
+                     label: str,
+                     polygons: List[List[float]],
+                     label_id: str = None,
+                     annot_id: str = None) -> Annotation:
     """Create an annotation proto for each polygon box, label input pair.

     Args:
@@ -531,6 +540,7 @@ class Inputs(Lister, BaseClient):
       label (str): annotation label name
       polygons (List): Polygon x,y points iterable
       label_id (str): annotation label ID
+      annot_id (str): annotation ID

     Returns:
       An annotation object for the specified input ID.
@@ -541,23 +551,24 @@ class Inputs(Lister, BaseClient):
     """
     if not isinstance(polygons, list):
       raise UserError("polygons must be a list of points")
+    annot_data = resources_pb2.Data(regions=[
+        resources_pb2.Region(
+            region_info=resources_pb2.RegionInfo(polygon=resources_pb2.Polygon(
+                points=[
+                    resources_pb2.Point(
+                        row=_point[1],  # row is y point
+                        col=_point[0],  # col is x point
+                        visibility="VISIBLE") for _point in polygons
+                ])),
+            data=resources_pb2.Data(concepts=[
+                resources_pb2.Concept(id=f"id-{''.join(label.split(' '))}", name=label, value=1.)
+                if not label_id else resources_pb2.Concept(id=label_id, name=label, value=1.)
+            ]))
+    ])
+    if annot_id:
+      input_mask_proto = resources_pb2.Annotation(id=annot_id, input_id=input_id, data=annot_data)
+    else:
+      input_mask_proto = resources_pb2.Annotation(input_id=input_id, data=annot_data)

     return input_mask_proto

@@ -707,7 +718,7 @@ class Inputs(Lister, BaseClient):

     return input_job_id, response

-  def patch_inputs(self, inputs: List[Input], action: str = 'merge') ->
+  def patch_inputs(self, inputs: List[Input], action: str = 'merge') -> None:
     """Patch list of input objects to the app.

     Args:
@@ -719,7 +730,6 @@ class Inputs(Lister, BaseClient):
     """
     if not isinstance(inputs, list):
       raise UserError("inputs must be a list of Input objects")
-    uuid.uuid4().hex  # generate a unique id for this job
     request = service_pb2.PatchInputsRequest(
         user_app_id=self.user_app_id, inputs=inputs, action=action)
     response = self._grpc_request(self.STUB.PatchInputs, request)
@@ -727,9 +737,9 @@ class Inputs(Lister, BaseClient):
       try:
         self.logger.warning(f"Patch inputs failed, status: {response.annotations[0].status}")
       except Exception:
-        self.logger.warning(f"Patch inputs failed, status: {response.status
+        self.logger.warning(f"Patch inputs failed, status: {response.status}")
+    else:
+      self.logger.info("\nPatch Inputs Successful\n%s", response.status)

   def upload_annotations(self, batch_annot: List[resources_pb2.Annotation], show_log: bool = True
                         ) -> Union[List[resources_pb2.Annotation], List[None]]:
@@ -761,6 +771,67 @@ class Inputs(Lister, BaseClient):

     return retry_upload

+  def patch_annotations(self, batch_annot: List[resources_pb2.Annotation],
+                        action: str = 'merge') -> None:
+    """Patch image annotations to app.
+
+    Args:
+      batch_annot: annot batch protos
+      action (str): Action to perform on the input. Options: 'merge', 'overwrite', 'remove'.
+
+    """
+    if not isinstance(batch_annot, list):
+      raise UserError("batch_annot must be a list of Annotation objects")
+    request = service_pb2.PatchAnnotationsRequest(
+        user_app_id=self.user_app_id, annotations=batch_annot, action=action)
+    response = self._grpc_request(self.STUB.PatchAnnotations, request)
+    response_dict = MessageToDict(response)
+    if response.status.code != status_code_pb2.SUCCESS:
+      try:
+        for annot in response_dict["annotations"]:
+          if annot['status']['code'] != status_code_pb2.ANNOTATION_SUCCESS:
+            self.logger.warning(f"Patch annotations failed, status: {annot['status']}")
+      except Exception:
+        self.logger.warning(f"Patch annotations failed due to {response.status}")
+    else:
+      self.logger.info("\nPatch Annotations Uploaded Successful\n%s", response.status)
+
+  def patch_concepts(self,
+                     concept_ids: List[str],
+                     labels: List[str] = [],
+                     values: List[float] = [],
+                     action: str = 'overwrite') -> None:
+    """Patch concepts to app.
+
+    Args:
+      concept_ids: A list of concept
+      labels: A list of label names
+      values: concept value
+      action (str): Action to perform on the input. Options: 'overwrite'.
+
+    """
+    if not labels:
+      labels = list(concept_ids)
+    if values:
+      concepts=[
+          resources_pb2.Concept(
+              id=concept_id, name=label, value=value)\
+          for concept_id, label, value in zip(concept_ids, labels, values)
+      ]
+    else:
+      concepts=[
+          resources_pb2.Concept(
+              id=concept_id, name=label, value=1.)\
+          for concept_id, label in zip(concept_ids, labels)
+      ]
+    request = service_pb2.PatchConceptsRequest(
+        user_app_id=self.user_app_id, concepts=concepts, action=action)
+    response = self._grpc_request(self.STUB.PatchConcepts, request)
+    if response.status.code != status_code_pb2.SUCCESS:
+      self.logger.warning(f"Patch Concepts failed, status: {response.status.details}")
+    else:
+      self.logger.info("\nPatch Concepts Successful\n%s", response.status)
+
   def _upload_batch(self, inputs: List[Input]) -> List[Input]:
     """Upload a batch of input objects to the app.
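Taken together, these changes add a stable annot_id to the bbox/mask proto helpers and introduce patch_annotations, patch_inputs, and patch_concepts. A minimal sketch of how they combine, assuming placeholder user/app/input IDs and an input that already exists in the app:

from clarifai.client.input import Inputs

input_obj = Inputs(user_id="me", app_id="my-app")  # placeholder IDs

# Create a bbox annotation with a fixed annotation ID (new in 10.5.4).
annot = Inputs.get_bbox_proto(
    input_id="input_1",
    label="dog",
    bbox=[0.2, 0.2, 0.8, 0.8],  # [xmin, ymin, xmax, ymax]
    annot_id="input_1_annot")
input_obj.upload_annotations([annot])

# Later, resize the same box by patching the annotation in place.
annot = Inputs.get_bbox_proto(
    input_id="input_1",
    label="dog",
    bbox=[0.4, 0.4, 0.6, 0.6],
    annot_id="input_1_annot")
input_obj.patch_annotations([annot], action='merge')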
{clarifai-10.5.2 → clarifai-10.5.4}/clarifai/datasets/export/inputs_annotations.py
CHANGED
@@ -12,6 +12,7 @@ from google.protobuf.json_format import MessageToDict
 from PIL import ImageFile
 from tqdm import tqdm

+from clarifai.constants.dataset import CONTENT_TYPE
 from clarifai.errors import UserError
 from clarifai.utils.logging import get_logger

@@ -61,8 +62,11 @@ class DatasetExportReader:
   def _download_temp_archive(self, archive_url: str,
                              chunk_size: int = 128) -> tempfile.TemporaryFile:
     """Downloads the temp archive of InputBatches."""
+    r = self.session.get(archive_url, stream=True)
+    if r.headers['content-type'] == CONTENT_TYPE['json']:
+      raise Exception("File is a json file :\n {}".format(r.json()))
+    elif r.headers['content-type'] != CONTENT_TYPE['zip']:
+      raise Exception('File is not a zip file')
     temp_file = tempfile.TemporaryFile()
     for chunk in r.iter_content(chunk_size=chunk_size):
       temp_file.write(chunk)
{clarifai-10.5.2 → clarifai-10.5.4}/clarifai/models/model_serving/constants.py
CHANGED
@@ -1,13 +1,20 @@
 import os

-from clarifai.utils.constants import CLARIFAI_HOME
-
 MAX_HW_DIM = 1024
 IMAGE_TENSOR_NAME = "image"
 TEXT_TENSOR_NAME = "text"

 BUILT_MODEL_EXT = ".clarifai"

+USER_CACHE_DIR = os.path.join(os.path.expanduser("~"), ".cache")
+CLARIFAI_HOME = os.path.expanduser(
+    os.getenv(
+        "CLARIFAI_HOME",
+        os.path.join(os.getenv("XDG_CACHE_HOME", USER_CACHE_DIR), "clarifai"),
+    ))
+os.makedirs(CLARIFAI_HOME, exist_ok=True)
+CLARIFAI_PAT_PATH = os.path.join(CLARIFAI_HOME, "pat")
+
 CLARIFAI_EXAMPLES_REPO = "https://github.com/Clarifai/examples.git"
 repo_name = CLARIFAI_EXAMPLES_REPO.split("/")[-1].replace(".git", "")
 CLARIFAI_EXAMPLES_REPO_PATH = os.path.join(CLARIFAI_HOME, repo_name)
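With CLARIFAI_HOME now defined locally in model_serving/constants.py instead of imported from clarifai.utils.constants, the resolution order is: an explicit CLARIFAI_HOME environment variable, then XDG_CACHE_HOME/clarifai, then ~/.cache/clarifai. A small sketch of inspecting the resolved paths (printed values are illustrative):

from clarifai.models.model_serving import constants

# Typically ~/.cache/clarifai when no CLARIFAI_HOME or XDG_CACHE_HOME override is set.
print(constants.CLARIFAI_HOME)
# Path constant pointing at <CLARIFAI_HOME>/pat.
print(constants.CLARIFAI_PAT_PATH)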
{clarifai-10.5.2 → clarifai-10.5.4}/clarifai/rag/rag.py
CHANGED
@@ -14,7 +14,9 @@ from clarifai.constants.rag import MAX_UPLOAD_BATCH_SIZE
 from clarifai.errors import UserError
 from clarifai.rag.utils import (convert_messages_to_str, format_assistant_message, load_documents,
                                 split_document)
+from clarifai.utils.constants import CLARIFAI_USER_ID_ENV_VAR
 from clarifai.utils.logging import get_logger
+from clarifai.utils.misc import get_from_dict_or_env

 DEFAULT_RAG_PROMPT_TEMPLATE = "Context information is below:\n{data.hits}\nGiven the context information and not prior knowledge, answer the query.\nQuery: {data.text.raw}\nAnswer: "

@@ -75,6 +77,12 @@ class RAG:
        >>> rag_agent = RAG.setup(app_url=YOUR_APP_URL)
        >>> rag_agent.chat(messages=[{"role":"human", "content":"What is Clarifai"}])
    """
+    if not app_url:
+      try:
+        user_id = get_from_dict_or_env(key="user_id", env_key=CLARIFAI_USER_ID_ENV_VAR, **kwargs)
+      except Exception:
+        pass
+
     now_ts = uuid.uuid4().hex[:10]
     if user_id and not app_url:
       user = User(user_id=user_id, base_url=base_url, pat=pat)
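The setup() change above means the user ID can now be picked up from the CLARIFAI_USER_ID environment variable when neither app_url nor user_id is passed in. A minimal sketch, assuming CLARIFAI_USER_ID and CLARIFAI_PAT are already exported in the environment:

from clarifai.rag import RAG

# With CLARIFAI_USER_ID and CLARIFAI_PAT set, setup() can resolve the user itself.
rag_agent = RAG.setup()
rag_agent.chat(messages=[{"role": "human", "content": "What is Clarifai?"}])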
{clarifai-10.5.2 → clarifai-10.5.4}/clarifai/utils/logging.py
CHANGED
@@ -3,6 +3,7 @@ from collections import defaultdict
 from typing import Dict, List, Optional, Union

 from rich import print as rprint
+from rich.console import Console
 from rich.logging import RichHandler
 from rich.table import Table
 from rich.traceback import install
@@ -84,7 +85,8 @@ def _configure_logger(name: str, logger_level: Union[int, str] = logging.NOTSET)
     logger.removeHandler(handler)

   # Add the new rich handler and formatter
-  handler = RichHandler(
+  handler = RichHandler(
+      rich_tracebacks=True, log_time_format="%Y-%m-%d %H:%M:%S", console=Console(width=255))
   formatter = logging.Formatter('%(name)s: %(message)s')
   handler.setFormatter(formatter)
   logger.addHandler(handler)
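The handler change above enables rich tracebacks and pins the console width, which mainly affects how wide log lines render. A quick sketch of exercising the configured logger (the logger name here is arbitrary):

from clarifai.utils.logging import get_logger

logger = get_logger(name="clarifai-demo")
logger.info("Rendered through RichHandler with rich tracebacks enabled")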
{clarifai-10.5.2 → clarifai-10.5.4}/tests/test_app.py
CHANGED
@@ -1,5 +1,6 @@
 import logging
 import os
+import time
 import uuid

 import pytest
@@ -113,6 +114,16 @@ class TestApp:
     all_datasets = list(create_app.list_datasets())
     assert len(all_datasets) == 1

+  def test_export_dataset(self, create_app):
+    dataset = create_app.dataset(dataset_id=CREATE_DATASET_ID)
+    dataset_demo_version = dataset.create_version()
+    versions = list(dataset.list_versions())
+    time.sleep(5)
+    dataset_demo_version.export(save_path='tests/output_demo.zip')
+    assert len(versions) == 1  #test for create_version
+    assert os.path.exists('tests/output_demo.zip') is True
+    os.remove('tests/output_demo.zip')
+
   def test_delete_dataset(self, create_app, caplog):
     with caplog.at_level(logging.INFO):
       create_app.delete_dataset(CREATE_DATASET_ID)
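The new test_export_dataset test exercises the dataset-version export path end to end. Outside the test suite, the same flow looks roughly like this (user, app, and dataset IDs are placeholders):

from clarifai.client.app import App

app = App(user_id="me", app_id="my-app")  # placeholder IDs
dataset = app.dataset(dataset_id="demo-dataset")
version = dataset.create_version()
# Export may need a short wait while the version is processed server-side.
version.export(save_path="output_demo.zip")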
{clarifai-10.5.2 → clarifai-10.5.4}/tests/test_data_upload.py
CHANGED
@@ -3,6 +3,7 @@ import os
 import uuid

 import pytest
+from google.protobuf.struct_pb2 import Struct

 from clarifai.client.user import User
 from clarifai.datasets.upload.utils import load_module_dataloader
@@ -107,6 +108,51 @@ class Testdataupload:
     assert len(paginated_inputs) == 5
     assert len(image_filterd_inputs) == 2  # 2 images uploaded in the above tests

+  def test_patch_inputs(self):
+    metadata = Struct()
+    metadata.update({'test': 'SUCCESS'})
+    new_input = self.input_object._get_proto(input_id='input_1', metadata=metadata)
+    self.input_object.patch_inputs([new_input], action='merge')
+    for input_item in list(self.input_object.list_inputs()):
+      if input_item.id == 'input_1':
+        assert input_item.data.metadata["test"] == "SUCCESS"
+        break
+
+  def test_patch_annotations(self, caplog):
+    bbox_points = [.2, .2, .8, .8]
+    annotation = self.input_object.get_bbox_proto(
+        input_id="input_1",
+        label="input_1_label",
+        bbox=bbox_points,
+        label_id="id-input_1_label",
+        annot_id="input_1_annot")
+    with caplog.at_level(logging.INFO):
+      self.input_object.upload_annotations([annotation])
+      assert "SUCCESS" in caplog.text  #upload annotations check
+
+    bbox_points = [.4, .4, .6, .6]
+    annotation = self.input_object.get_bbox_proto(
+        input_id="input_1",
+        label="input_1_label",
+        bbox=bbox_points,
+        label_id="id-input_1_label",
+        annot_id="input_1_annot")
+    self.input_object.patch_annotations([annotation], action='merge')
+    test_annotation = list(self.input_object.list_annotations())[0]
+    assert test_annotation.id == "input_1_annot"
+    annot_bbox = test_annotation.data.regions[0].region_info.bounding_box
+    assert round(annot_bbox.left_col, 1) == .4 and round(annot_bbox.top_row, 1) == .4 and round(
+        annot_bbox.right_col, 1) == .6 and round(annot_bbox.bottom_row, 1) == .6
+
+  def test_patch_concepts(self):
+    self.input_object.patch_concepts(
+        concept_ids=["id-input_1_label"], labels=["SUCCESS"], values=[], action='overwrite')
+    concepts = list(self.app.list_concepts())
+    for concept in concepts:
+      if concept.id == "id-input_1_label":
+        assert concepts[0].name == "SUCCESS"
+        break
+
   def test_aggregate_inputs(self, caplog):
     uploaded_inputs = list(self.input_object.list_inputs())
     with caplog.at_level(logging.INFO):
@@ -123,7 +169,7 @@ class Testdataupload:
     self.input_object.delete_inputs(uploaded_inputs)
     assert "Inputs Deleted" in caplog.text  # Testing delete inputs action
     assert len(uploaded_inputs) == 5  # 5 inputs are uploaded from the CSV file
-    assert len(concepts) ==
+    assert len(concepts) == 3  # Test for list concepts

   def test_upload_folder(self, caplog):
     self.dataset.upload_from_folder(folder_path=FOLDER_PATH, input_type='image', labels=True)
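The new tests above double as usage examples for the patch APIs. A condensed sketch of the metadata and concept patching they cover (IDs and labels are placeholders):

from google.protobuf.struct_pb2 import Struct

from clarifai.client.input import Inputs

input_obj = Inputs(user_id="me", app_id="my-app")  # placeholder IDs

# Merge new metadata onto an existing input.
metadata = Struct()
metadata.update({"reviewed": "yes"})
patched = input_obj._get_proto(input_id="input_1", metadata=metadata)
input_obj.patch_inputs([patched], action="merge")

# Rename an existing concept by overwriting its label.
input_obj.patch_concepts(
    concept_ids=["id-input_1_label"], labels=["reviewed-label"], action="overwrite")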
clarifai-10.5.2/VERSION
DELETED
@@ -1 +0,0 @@
-10.5.2
clarifai-10.5.2/clarifai/utils/constants.py
DELETED
@@ -1,12 +0,0 @@
-import os
-
-USER_CACHE_DIR = os.path.join(os.path.expanduser("~"), ".cache")
-CLARIFAI_HOME = os.path.expanduser(
-    os.getenv(
-        "CLARIFAI_HOME",
-        os.path.join(os.getenv("XDG_CACHE_HOME", USER_CACHE_DIR), "clarifai"),
-    ))
-os.makedirs(CLARIFAI_HOME, exist_ok=True)
-CLARIFAI_PAT_PATH = os.path.join(CLARIFAI_HOME, "pat")
-CLARIFAI_PAT_ENV_VAR = "CLARIFAI_PAT"
-CLARIFAI_SESSION_TOKEN_ENV_VAR = "CLARIFAI_SESSION_TOKEN"