clarifai 11.3.0rc2__py3-none-any.whl → 11.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/cli/__main__.py +1 -1
- clarifai/cli/base.py +144 -136
- clarifai/cli/compute_cluster.py +45 -31
- clarifai/cli/deployment.py +93 -76
- clarifai/cli/model.py +578 -180
- clarifai/cli/nodepool.py +100 -82
- clarifai/client/__init__.py +12 -2
- clarifai/client/app.py +973 -911
- clarifai/client/auth/helper.py +345 -342
- clarifai/client/auth/register.py +7 -7
- clarifai/client/auth/stub.py +107 -106
- clarifai/client/base.py +185 -178
- clarifai/client/compute_cluster.py +214 -180
- clarifai/client/dataset.py +793 -698
- clarifai/client/deployment.py +55 -50
- clarifai/client/input.py +1223 -1088
- clarifai/client/lister.py +47 -45
- clarifai/client/model.py +1939 -1717
- clarifai/client/model_client.py +525 -502
- clarifai/client/module.py +82 -73
- clarifai/client/nodepool.py +358 -213
- clarifai/client/runner.py +58 -0
- clarifai/client/search.py +342 -309
- clarifai/client/user.py +419 -414
- clarifai/client/workflow.py +294 -274
- clarifai/constants/dataset.py +11 -17
- clarifai/constants/model.py +8 -2
- clarifai/datasets/export/inputs_annotations.py +233 -217
- clarifai/datasets/upload/base.py +63 -51
- clarifai/datasets/upload/features.py +43 -38
- clarifai/datasets/upload/image.py +237 -207
- clarifai/datasets/upload/loaders/coco_captions.py +34 -32
- clarifai/datasets/upload/loaders/coco_detection.py +72 -65
- clarifai/datasets/upload/loaders/imagenet_classification.py +57 -53
- clarifai/datasets/upload/loaders/xview_detection.py +274 -132
- clarifai/datasets/upload/multimodal.py +55 -46
- clarifai/datasets/upload/text.py +55 -47
- clarifai/datasets/upload/utils.py +250 -234
- clarifai/errors.py +51 -50
- clarifai/models/api.py +260 -238
- clarifai/modules/css.py +50 -50
- clarifai/modules/pages.py +33 -33
- clarifai/rag/rag.py +312 -288
- clarifai/rag/utils.py +91 -84
- clarifai/runners/models/model_builder.py +906 -802
- clarifai/runners/models/model_class.py +370 -331
- clarifai/runners/models/model_run_locally.py +459 -419
- clarifai/runners/models/model_runner.py +170 -162
- clarifai/runners/models/model_servicer.py +78 -70
- clarifai/runners/server.py +111 -101
- clarifai/runners/utils/code_script.py +225 -187
- clarifai/runners/utils/const.py +4 -1
- clarifai/runners/utils/data_types/__init__.py +12 -0
- clarifai/runners/utils/data_types/data_types.py +598 -0
- clarifai/runners/utils/data_utils.py +387 -440
- clarifai/runners/utils/loader.py +247 -227
- clarifai/runners/utils/method_signatures.py +411 -386
- clarifai/runners/utils/openai_convertor.py +108 -109
- clarifai/runners/utils/serializers.py +175 -179
- clarifai/runners/utils/url_fetcher.py +35 -35
- clarifai/schema/search.py +56 -63
- clarifai/urls/helper.py +125 -102
- clarifai/utils/cli.py +129 -123
- clarifai/utils/config.py +127 -87
- clarifai/utils/constants.py +49 -0
- clarifai/utils/evaluation/helpers.py +503 -466
- clarifai/utils/evaluation/main.py +431 -393
- clarifai/utils/evaluation/testset_annotation_parser.py +154 -144
- clarifai/utils/logging.py +324 -306
- clarifai/utils/misc.py +60 -56
- clarifai/utils/model_train.py +165 -146
- clarifai/utils/protobuf.py +126 -103
- clarifai/versions.py +3 -1
- clarifai/workflows/export.py +48 -50
- clarifai/workflows/utils.py +39 -36
- clarifai/workflows/validate.py +55 -43
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/METADATA +16 -6
- clarifai-11.4.0.dist-info/RECORD +109 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/errors.cpython-311.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
- clarifai/client/cli/__init__.py +0 -0
- clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/base_cli.py +0 -88
- clarifai/client/cli/model_cli.py +0 -29
- clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
- clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
- clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
- clarifai/runners/models/base_typed_model.py +0 -238
- clarifai/runners/models/model_class_refract.py +0 -80
- clarifai/runners/models/model_upload.py +0 -607
- clarifai/runners/models/temp.py +0 -25
- clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_handler.py +0 -231
- clarifai/runners/utils/data_handler_refract.py +0 -213
- clarifai/runners/utils/data_types.py +0 -469
- clarifai/runners/utils/logger.py +0 -0
- clarifai/runners/utils/openai_format.py +0 -87
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
- clarifai-11.3.0rc2.dist-info/RECORD +0 -322
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/entry_points.txt +0 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info/licenses}/LICENSE +0 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/top_level.txt +0 -0
clarifai/client/search.py
CHANGED
@@ -10,328 +10,361 @@ from schema import SchemaError
 from clarifai.client.base import BaseClient
 from clarifai.client.input import Inputs
 from clarifai.client.lister import Lister
-from clarifai.constants.search import (
+from clarifai.constants.search import (
+    DEFAULT_SEARCH_ALGORITHM,
+    DEFAULT_SEARCH_METRIC,
+    DEFAULT_TOP_K,
+)
 from clarifai.errors import UserError
 from clarifai.schema.search import get_schema
 
 
 class Search(Lister, BaseClient):
+    def __init__(
+        self,
+        user_id: str,
+        app_id: str,
+        top_k: int = None,
+        metric: str = DEFAULT_SEARCH_METRIC,
+        algorithm: str = DEFAULT_SEARCH_ALGORITHM,
+        pagination: bool = False,
+        base_url: str = "https://api.clarifai.com",
+        pat: str = None,
+        token: str = None,
+        root_certificates_path: str = None,
+    ):
+        """Initialize the Search object.
+
+        Args:
+            user_id (str): User ID.
+            app_id (str): App ID.
+            top_k (int, optional): Top K results to retrieve. Defaults to 10.
+            metric (str, optional): Similarity metric (either 'cosine' or 'euclidean'). Defaults to 'cosine'.
+            alogrithm (str, optional): Search algorithm (either 'nearest_neighbor' or 'brute_force'). Defaults to 'nearest_neighbor'.
+            pagination (bool, optional): Enable pagination. Defaults to False.
+            base_url (str, optional): Base API url. Defaults to "https://api.clarifai.com".
+            pat (str, optional): A personal access token for authentication. Can be set as env var CLARIFAI_PAT
+            token (str): A session token for authentication. Accepts either a session token or a pat. Can be set as env var CLARIFAI_SESSION_TOKEN
+            root_certificates_path (str): Path to the SSL root certificates file, used to establish secure gRPC connections.
+
+        Raises:
+            UserError: If the metric is not 'cosine' or 'euclidean'.
+            UserError: If the algorithm is not 'nearest_neighbor' or 'brute_force'.
+        """
+        if metric not in ["cosine", "euclidean"]:
+            raise UserError("Metric should be either cosine or euclidean")
+        if algorithm not in ["nearest_neighbor", "brute_force"]:
+            raise UserError("Algorithm should be either nearest_neighbor or brute_force")
+        if metric == "cosine" and algorithm == "nearest_neighbor":
+            raise UserError(
+                "Cosine distance metric is not supported with nearest neighbor algorithm"
+            )
+        if top_k and pagination:
+            raise UserError(
+                "top_k and pagination cannot be used together. Please set pagination to False."
+            )
+        if not top_k and not pagination:
+            top_k = DEFAULT_TOP_K
+
+        self.user_id = user_id
+        self.app_id = app_id
+        self.metric_distance = dict(cosine="COSINE_DISTANCE", euclidean="EUCLIDEAN_DISTANCE")[
+            metric
+        ]
+        self.algorithm = algorithm
+        self.data_proto = resources_pb2.Data()
+        self.top_k = top_k
+        self.pagination = pagination
+        self.inputs = Inputs(
+            user_id=self.user_id, app_id=self.app_id, pat=pat, token=token, base_url=base_url
+        )
+        self.rank_filter_schema = get_schema()
+        BaseClient.__init__(
+            self,
+            user_id=self.user_id,
+            app_id=self.app_id,
+            base=base_url,
+            pat=pat,
+            token=token,
+            root_certificates_path=root_certificates_path,
+        )
+        Lister.__init__(self, page_size=1000)
+
+    def _get_annot_proto(self, **kwargs):
+        """Get an Annotation proto message based on keyword arguments.
+
+        Args:
+            **kwargs: Keyword arguments specifying the resource.
+
+        Returns:
+            resources_pb2.Annotation: An Annotation proto message.
+        """
+        if not kwargs:
+            return resources_pb2.Annotation()
+
+        self.data_proto = resources_pb2.Data()
+        for key, value in kwargs.items():
+            if key == "image_bytes":
+                image_proto = self.inputs.get_input_from_bytes("", image_bytes=value).data.image
+                self.data_proto.image.CopyFrom(image_proto)
+
+            elif key == "image_url":
+                image_proto = self.inputs.get_input_from_url("", image_url=value).data.image
+                self.data_proto.image.CopyFrom(image_proto)
+
+            elif key == "concepts":
+                for concept in value:
+                    concept_proto = resources_pb2.Concept(**concept)
+                    self.data_proto.concepts.add().CopyFrom(concept_proto)
+
+            elif key == "text_raw":
+                text_proto = self.inputs.get_input_from_bytes(
+                    "", text_bytes=bytes(value, 'utf-8')
+                ).data.text
+                self.data_proto.text.CopyFrom(text_proto)
+
+            elif key == "metadata":
+                metadata_struct = Struct()
+                metadata_struct.update(value)
+                self.data_proto.metadata.CopyFrom(metadata_struct)
+
+            elif key == "geo_point":
+                geo_point_proto = self._get_geo_point_proto(
+                    value["longitude"], value["latitude"], value["geo_limit"]
+                )
+                self.data_proto.geo.CopyFrom(geo_point_proto)
+
+            else:
+                raise UserError(f"kwargs contain key that is not supported: {key}")
+        return resources_pb2.Annotation(data=self.data_proto)
+
+    def _get_input_proto(self, **kwargs):
+        """Get an Input proto message based on keyword arguments.
 
+        Args:
+            **kwargs: Keyword arguments specifying the resource.
+
+        Returns:
+            resources_pb2.Input: An Input proto message.
+        """
+        if not kwargs:
+            return resources_pb2.Input()
+
+        self.input_proto = resources_pb2.Input()
+        self.data_proto = resources_pb2.Data()
+        for key, value in kwargs.items():
+            if key == "input_types":
+                for input_type in value:
+                    if input_type == "image":
+                        self.data_proto.image.CopyFrom(resources_pb2.Image())
+                    elif input_type == "text":
+                        self.data_proto.text.CopyFrom(resources_pb2.Text())
+                    elif input_type == "audio":
+                        self.data_proto.audio.CopyFrom(resources_pb2.Audio())
+                    elif input_type == "video":
+                        self.data_proto.video.CopyFrom(resources_pb2.Video())
+                self.input_proto.data.CopyFrom(self.data_proto)
+            elif key == "input_dataset_ids":
+                self.input_proto.dataset_ids.extend(value)
+            elif key == "input_status_code":
+                self.input_proto.status.code = value
+            else:
+                raise UserError(f"kwargs contain key that is not supported: {key}")
+        return self.input_proto
+
+    def _get_geo_point_proto(
+        self, longitude: float, latitude: float, geo_limit: float
+    ) -> resources_pb2.Geo:
+        """Get a GeoPoint proto message based on geographical data.
+
+        Args:
+            longitude (float): Longitude coordinate.
+            latitude (float): Latitude coordinate.
+            geo_limit (float): Geographical limit.
+
+        Returns:
+            resources_pb2.Geo: A Geo proto message.
+        """
+        return resources_pb2.Geo(
+            geo_point=resources_pb2.GeoPoint(longitude=longitude, latitude=latitude),
+            geo_limit=resources_pb2.GeoLimit(type="withinKilometers", value=geo_limit),
+        )
+
+    def _list_topk_generator(
+        self, endpoint: Callable[..., Any], proto_message: Any, request_data: Dict[str, Any]
+    ) -> Generator[Dict[str, Any], None, None]:
+        """Lists all pages of a resource.
+
+        Args:
+            endpoint (Callable): The endpoint to call.
+            proto_message (Any): The proto message to use.
+            request_data (dict): The request data to use.
+
+        Yields:
+            response_dict: The next item in the listing.
+        """
+        max_pages = ceil(self.top_k / self.default_page_size)
+        total_hits = 0
+        page = 1
+        while page <= max_pages:
+            if page == max_pages:
+                per_page = self.top_k - total_hits
+            else:
+                per_page = self.default_page_size
+            request_data['pagination'] = service_pb2.Pagination(page=page, per_page=per_page)
+            response = self._grpc_request(endpoint, proto_message(**request_data))
+            dict_response = MessageToDict(response, preserving_proto_field_name=True)
+            if response.status.code != status_code_pb2.SUCCESS:
+                if "page * perPage cannot exceed" in str(response.status.details):
+                    msg = (
+                        f"Your top_k is set to {self.top_k}. "
+                        f"The current pagination settings exceed the limit. Please reach out to "
+                        f"support@clarifai.com to request an increase for your use case.\n"
+                        f"req_id: {response.status.req_id}"
+                    )
+                    raise UserError(msg)
+                else:
+                    raise Exception(f"Listing failed with response {response!r}")
+
+            if 'hits' not in list(dict_response.keys()):
+                break
+            page += 1
+            total_hits += per_page
+            yield response
+
+    def _list_all_pages_generator(
         self,
-  def _get_annot_proto(self, **kwargs):
-    """Get an Annotation proto message based on keyword arguments.
-
-    Args:
-        **kwargs: Keyword arguments specifying the resource.
-
-    Returns:
-        resources_pb2.Annotation: An Annotation proto message.
-    """
-    if not kwargs:
-      return resources_pb2.Annotation()
-
-    self.data_proto = resources_pb2.Data()
-    for key, value in kwargs.items():
-      if key == "image_bytes":
-        image_proto = self.inputs.get_input_from_bytes("", image_bytes=value).data.image
-        self.data_proto.image.CopyFrom(image_proto)
-
-      elif key == "image_url":
-        image_proto = self.inputs.get_input_from_url("", image_url=value).data.image
-        self.data_proto.image.CopyFrom(image_proto)
-
-      elif key == "concepts":
-        for concept in value:
-          concept_proto = resources_pb2.Concept(**concept)
-          self.data_proto.concepts.add().CopyFrom(concept_proto)
-
-      elif key == "text_raw":
-        text_proto = self.inputs.get_input_from_bytes(
-            "", text_bytes=bytes(value, 'utf-8')).data.text
-        self.data_proto.text.CopyFrom(text_proto)
-
-      elif key == "metadata":
-        metadata_struct = Struct()
-        metadata_struct.update(value)
-        self.data_proto.metadata.CopyFrom(metadata_struct)
-
-      elif key == "geo_point":
-        geo_point_proto = self._get_geo_point_proto(value["longitude"], value["latitude"],
-                                                    value["geo_limit"])
-        self.data_proto.geo.CopyFrom(geo_point_proto)
-
-      else:
-        raise UserError(f"kwargs contain key that is not supported: {key}")
-    return resources_pb2.Annotation(data=self.data_proto)
-
-  def _get_input_proto(self, **kwargs):
-    """Get an Input proto message based on keyword arguments.
-
-    Args:
-        **kwargs: Keyword arguments specifying the resource.
-
-    Returns:
-        resources_pb2.Input: An Input proto message.
-    """
-    if not kwargs:
-      return resources_pb2.Input()
-
-    self.input_proto = resources_pb2.Input()
-    self.data_proto = resources_pb2.Data()
-    for key, value in kwargs.items():
-      if key == "input_types":
-        for input_type in value:
-          if input_type == "image":
-            self.data_proto.image.CopyFrom(resources_pb2.Image())
-          elif input_type == "text":
-            self.data_proto.text.CopyFrom(resources_pb2.Text())
-          elif input_type == "audio":
-            self.data_proto.audio.CopyFrom(resources_pb2.Audio())
-          elif input_type == "video":
-            self.data_proto.video.CopyFrom(resources_pb2.Video())
-        self.input_proto.data.CopyFrom(self.data_proto)
-      elif key == "input_dataset_ids":
-        self.input_proto.dataset_ids.extend(value)
-      elif key == "input_status_code":
-        self.input_proto.status.code = value
-      else:
-        raise UserError(f"kwargs contain key that is not supported: {key}")
-    return self.input_proto
-
-  def _get_geo_point_proto(self, longitude: float, latitude: float,
-                           geo_limit: float) -> resources_pb2.Geo:
-    """Get a GeoPoint proto message based on geographical data.
-
-    Args:
-        longitude (float): Longitude coordinate.
-        latitude (float): Latitude coordinate.
-        geo_limit (float): Geographical limit.
-
-    Returns:
-        resources_pb2.Geo: A Geo proto message.
-    """
-    return resources_pb2.Geo(
-        geo_point=resources_pb2.GeoPoint(longitude=longitude, latitude=latitude),
-        geo_limit=resources_pb2.GeoLimit(type="withinKilometers", value=geo_limit))
-
-  def _list_topk_generator(self, endpoint: Callable[..., Any], proto_message: Any,
-                           request_data: Dict[str, Any]) -> Generator[Dict[str, Any], None, None]:
-    """Lists all pages of a resource.
+        endpoint: Callable,
+        proto_message: Any,
+        request_data: Dict[str, Any],
+        page_no: int = None,
+        per_page: int = None,
+    ) -> Generator[Dict[str, Any], None, None]:
+        """Lists pages of a resource.
 
         Args:
             endpoint (Callable): The endpoint to call.
             proto_message (Any): The proto message to use.
            request_data (dict): The request data to use.
+            page_no (int): The page number to list.
+            per_page (int): The number of items per page.
 
         Yields:
            response_dict: The next item in the listing.
         """
-      all_ranks.append(resources_pb2.Rank(annotation=rank_annot_proto))
-
-    all_filters = []
-    # check for filters which is compatible with input proto
-    for each_filter in filters:
-      input_dict = {
-          key: each_filter.pop(key)
-          for key in ['input_types', 'input_dataset_ids', 'input_status_code']
-          if key in each_filter
-      }
-
-      all_filters.append(
-          resources_pb2.Filter(
-              annotation=self._get_annot_proto(**each_filter),
-              input=self._get_input_proto(**input_dict)))
-
-    # Create a PostInputsSearchesRequest proto message
-    request_data = dict(
-        user_app_id=self.user_app_id,
-        searches=[
-            resources_pb2.Search(
-                query=resources_pb2.Query(ranks=all_ranks, filters=all_filters),
-                algorithm=self.algorithm,
-                metric=self.metric_distance)
-        ])
-    # Calls PostInputsSearches for annotation ranks, input filters
-    if self.pagination:
-      return self._list_all_pages_generator(self.STUB.PostInputsSearches,
-                                            service_pb2.PostInputsSearchesRequest, request_data,
-                                            page_no, per_page)
-    return self._list_topk_generator(self.STUB.PostInputsSearches,
-                                     service_pb2.PostInputsSearchesRequest, request_data)
+        page = 1 if not page_no else page_no
+        if page_no and not per_page:
+            per_page = self.default_page_size
+        while True:
+            request_data['pagination'] = service_pb2.Pagination(page=page, per_page=per_page)
+            response = self._grpc_request(endpoint, proto_message(**request_data))
+            dict_response = MessageToDict(response, preserving_proto_field_name=True)
+            if response.status.code != status_code_pb2.SUCCESS:
+                if "page * perPage cannot exceed" in str(response.status.details):
+                    msg = (
+                        f"Your pagination is set to {page_no * per_page}. "
+                        f"The current pagination settings exceed the limit. Please reach out to "
+                        f"support@clarifai.com to request an increase for your use case.\n"
+                        f"req_id: {response.status.req_id}"
+                    )
+                    raise UserError(msg)
+                else:
+                    raise Exception(f"Listing failed with response {response!r}")
+            if 'hits' not in list(dict_response.keys()):
+                break
+            yield response
+            if page_no is not None or per_page is not None:
+                break
+            page += 1
+
+    def query(
+        self,
+        ranks=[{}],
+        filters=[{}],
+        page_no: int = None,
+        per_page: int = None,
+    ):
+        """Perform a query with rank and filters.
+
+        Args:
+            ranks (List[Dict], optional): List of rank parameters. Defaults to [{}].
+            filters (List[Dict], optional): List of filter parameters. Defaults to [{}].
+            page_no (int): The page number to list.
+            per_page (int): The number of items per page.
+
+        Returns:
+            Generator[Dict[str, Any], None, None]: A generator of query results.
+
+        Examples:
+            Get successful inputs of type image or text
+            >>> from clarifai.client.search import Search
+            >>> search = Search(user_id='user_id', app_id='app_id', top_k=10, metric='cosine')
+            # This performs OR operation on input_types and input_status_code
+            >>> res = search.query(filters=[{'input_types': ['image', 'text'], 'input_status_code': 30000}])
+
+            Vector search over inputs
+            >>> from clarifai.client.search import Search
+            >>> search = Search(user_id='user_id', app_id='app_id' , metric='cosine', pagination=True)
+            >>> res = search.query(ranks=[{'image_url': 'https://samples.clarifai.com/dog.tiff'}],page_no=2, per_page=5)
+
+        Note:
+            For schema of rank and filter, please refer to [schema](https://github.com/Clarifai/clarifai-python/tree/master/clarifai/schema/search.py).
+            For more detailed search examples, please refer to [examples](https://github.com/Clarifai/examples/tree/main/search).
+        """
+        if not self.pagination and (per_page or page_no):
+            raise UserError(
+                "Pagination settings are only available when pagination is enabled."
+                "Please set Search(pagination=True) while initializing Search()."
+            )
+        try:
+            self.rank_filter_schema.validate(ranks)
+            self.rank_filter_schema.validate(filters)
+        except SchemaError as err:
+            raise UserError(f"Invalid rank or filter input: {err}")
+
+        # For each rank, create a Rank proto message
+        # For ranks it only allows resources_pb2.Annotation proto, so no need of splitting protos into annotation and input.
+        all_ranks = []
+        for rank_dict in ranks:
+            rank_annot_proto = self._get_annot_proto(**rank_dict)
+            all_ranks.append(resources_pb2.Rank(annotation=rank_annot_proto))
+
+        all_filters = []
+        # check for filters which is compatible with input proto
+        for each_filter in filters:
+            input_dict = {
+                key: each_filter.pop(key)
+                for key in ['input_types', 'input_dataset_ids', 'input_status_code']
+                if key in each_filter
+            }
+
+            all_filters.append(
+                resources_pb2.Filter(
+                    annotation=self._get_annot_proto(**each_filter),
+                    input=self._get_input_proto(**input_dict),
+                )
+            )
+
+        # Create a PostInputsSearchesRequest proto message
+        request_data = dict(
+            user_app_id=self.user_app_id,
+            searches=[
+                resources_pb2.Search(
+                    query=resources_pb2.Query(ranks=all_ranks, filters=all_filters),
+                    algorithm=self.algorithm,
+                    metric=self.metric_distance,
+                )
+            ],
+        )
+        # Calls PostInputsSearches for annotation ranks, input filters
+        if self.pagination:
+            return self._list_all_pages_generator(
+                self.STUB.PostInputsSearches,
+                service_pb2.PostInputsSearchesRequest,
+                request_data,
+                page_no,
+                per_page,
+            )
+        return self._list_topk_generator(
+            self.STUB.PostInputsSearches, service_pb2.PostInputsSearchesRequest, request_data
+        )