clarifai 11.3.0rc2__py3-none-any.whl → 11.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
- clarifai/__init__.py +1 -1
- clarifai/cli/__main__.py +1 -1
- clarifai/cli/base.py +144 -136
- clarifai/cli/compute_cluster.py +45 -31
- clarifai/cli/deployment.py +93 -76
- clarifai/cli/model.py +578 -180
- clarifai/cli/nodepool.py +100 -82
- clarifai/client/__init__.py +12 -2
- clarifai/client/app.py +973 -911
- clarifai/client/auth/helper.py +345 -342
- clarifai/client/auth/register.py +7 -7
- clarifai/client/auth/stub.py +107 -106
- clarifai/client/base.py +185 -178
- clarifai/client/compute_cluster.py +214 -180
- clarifai/client/dataset.py +793 -698
- clarifai/client/deployment.py +55 -50
- clarifai/client/input.py +1223 -1088
- clarifai/client/lister.py +47 -45
- clarifai/client/model.py +1939 -1717
- clarifai/client/model_client.py +525 -502
- clarifai/client/module.py +82 -73
- clarifai/client/nodepool.py +358 -213
- clarifai/client/runner.py +58 -0
- clarifai/client/search.py +342 -309
- clarifai/client/user.py +419 -414
- clarifai/client/workflow.py +294 -274
- clarifai/constants/dataset.py +11 -17
- clarifai/constants/model.py +8 -2
- clarifai/datasets/export/inputs_annotations.py +233 -217
- clarifai/datasets/upload/base.py +63 -51
- clarifai/datasets/upload/features.py +43 -38
- clarifai/datasets/upload/image.py +237 -207
- clarifai/datasets/upload/loaders/coco_captions.py +34 -32
- clarifai/datasets/upload/loaders/coco_detection.py +72 -65
- clarifai/datasets/upload/loaders/imagenet_classification.py +57 -53
- clarifai/datasets/upload/loaders/xview_detection.py +274 -132
- clarifai/datasets/upload/multimodal.py +55 -46
- clarifai/datasets/upload/text.py +55 -47
- clarifai/datasets/upload/utils.py +250 -234
- clarifai/errors.py +51 -50
- clarifai/models/api.py +260 -238
- clarifai/modules/css.py +50 -50
- clarifai/modules/pages.py +33 -33
- clarifai/rag/rag.py +312 -288
- clarifai/rag/utils.py +91 -84
- clarifai/runners/models/model_builder.py +906 -802
- clarifai/runners/models/model_class.py +370 -331
- clarifai/runners/models/model_run_locally.py +459 -419
- clarifai/runners/models/model_runner.py +170 -162
- clarifai/runners/models/model_servicer.py +78 -70
- clarifai/runners/server.py +111 -101
- clarifai/runners/utils/code_script.py +225 -187
- clarifai/runners/utils/const.py +4 -1
- clarifai/runners/utils/data_types/__init__.py +12 -0
- clarifai/runners/utils/data_types/data_types.py +598 -0
- clarifai/runners/utils/data_utils.py +387 -440
- clarifai/runners/utils/loader.py +247 -227
- clarifai/runners/utils/method_signatures.py +411 -386
- clarifai/runners/utils/openai_convertor.py +108 -109
- clarifai/runners/utils/serializers.py +175 -179
- clarifai/runners/utils/url_fetcher.py +35 -35
- clarifai/schema/search.py +56 -63
- clarifai/urls/helper.py +125 -102
- clarifai/utils/cli.py +129 -123
- clarifai/utils/config.py +127 -87
- clarifai/utils/constants.py +49 -0
- clarifai/utils/evaluation/helpers.py +503 -466
- clarifai/utils/evaluation/main.py +431 -393
- clarifai/utils/evaluation/testset_annotation_parser.py +154 -144
- clarifai/utils/logging.py +324 -306
- clarifai/utils/misc.py +60 -56
- clarifai/utils/model_train.py +165 -146
- clarifai/utils/protobuf.py +126 -103
- clarifai/versions.py +3 -1
- clarifai/workflows/export.py +48 -50
- clarifai/workflows/utils.py +39 -36
- clarifai/workflows/validate.py +55 -43
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/METADATA +16 -6
- clarifai-11.4.0.dist-info/RECORD +109 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/errors.cpython-311.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
- clarifai/client/cli/__init__.py +0 -0
- clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/base_cli.py +0 -88
- clarifai/client/cli/model_cli.py +0 -29
- clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
- clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
- clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
- clarifai/runners/models/base_typed_model.py +0 -238
- clarifai/runners/models/model_class_refract.py +0 -80
- clarifai/runners/models/model_upload.py +0 -607
- clarifai/runners/models/temp.py +0 -25
- clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_handler.py +0 -231
- clarifai/runners/utils/data_handler_refract.py +0 -213
- clarifai/runners/utils/data_types.py +0 -469
- clarifai/runners/utils/logger.py +0 -0
- clarifai/runners/utils/openai_format.py +0 -87
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
- clarifai-11.3.0rc2.dist-info/RECORD +0 -322
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/entry_points.txt +0 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info/licenses}/LICENSE +0 -0
- {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/top_level.txt +0 -0
clarifai/cli/model.py
CHANGED
@@ -1,11 +1,28 @@
+import os
+import shutil
+
 import click
 
 from clarifai.cli.base import cli
+from clarifai.utils.cli import validate_context
+from clarifai.utils.constants import (
+    DEFAULT_LOCAL_DEV_APP_ID,
+    DEFAULT_LOCAL_DEV_COMPUTE_CLUSTER_CONFIG,
+    DEFAULT_LOCAL_DEV_COMPUTE_CLUSTER_ID,
+    DEFAULT_LOCAL_DEV_DEPLOYMENT_ID,
+    DEFAULT_LOCAL_DEV_MODEL_ID,
+    DEFAULT_LOCAL_DEV_MODEL_TYPE,
+    DEFAULT_LOCAL_DEV_NODEPOOL_CONFIG,
+    DEFAULT_LOCAL_DEV_NODEPOOL_ID,
+)
+from clarifai.utils.logging import logger
 
 
-@cli.group(
+@cli.group(
+    ['model'], context_settings={'max_content_width': shutil.get_terminal_size().columns - 10}
+)
 def model():
+    """Manage models: upload, test, local dev, predict, etc"""
 
 
 @model.command()
@@ -16,22 +33,21 @@ def model():
     type=click.Choice(['runtime', 'build', 'upload'], case_sensitive=True),
     default="upload",
     show_default=True,
-    help=
-    'The stage we are calling download checkpoints from. Typically this would "upload" and will download checkpoints if config.yaml checkpoints section has when set to "upload". Other options include "runtime" to be used in load_model or "upload" to be used during model upload. Set this stage to whatever you have in config.yaml to force downloading now.'
+    help='The stage we are calling download checkpoints from. Typically this would "upload" and will download checkpoints if config.yaml checkpoints section has when set to "upload". Other options include "runtime" to be used in load_model or "upload" to be used during model upload. Set this stage to whatever you have in config.yaml to force downloading now.',
 )
 @click.option(
     '--skip_dockerfile',
     is_flag=True,
-    help=
-    'Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile.',
+    help='Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile.',
 )
 def upload(model_path, stage, skip_dockerfile):
+    """Upload a model to Clarifai.
 
     MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+    """
+    from clarifai.runners.models.model_builder import upload_model
+
+    upload_model(model_path, stage, skip_dockerfile)
 
 
 @model.command()
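As the hunk above shows, the CLI upload command is now a thin wrapper around upload_model from clarifai.runners.models.model_builder, so the same flow can be driven from Python. A minimal sketch, assuming a local ./my_model directory containing a config.yaml; the path and stage values are illustrative, not taken from this diff:

    # Programmatic equivalent of the CLI upload path wired in above.
    from clarifai.runners.models.model_builder import upload_model

    # Arguments are passed positionally here: (model_path, stage, skip_dockerfile).
    # "./my_model" and "upload" are placeholder choices for this sketch.
    upload_model("./my_model", "upload", False)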
@@ -46,8 +62,7 @@ def upload(model_path, stage, skip_dockerfile):
     type=click.Path(exists=False),
     required=False,
     default=None,
-    help=
-    'Option path to write the checkpoints to. This will place them in {out_path}/1/checkpoints If not provided it will default to {model_path}/1/checkpoints where the config.yaml is read.'
+    help='Option path to write the checkpoints to. This will place them in {out_path}/1/checkpoints If not provided it will default to {model_path}/1/checkpoints where the config.yaml is read.',
 )
 @click.option(
     '--stage',
@@ -55,18 +70,18 @@ def upload(model_path, stage, skip_dockerfile):
     type=click.Choice(['runtime', 'build', 'upload'], case_sensitive=True),
     default="build",
     show_default=True,
-    help=
-    'The stage we are calling download checkpoints from. Typically this would be in the build stage which is the default. Other options include "runtime" to be used in load_model or "upload" to be used during model upload. Set this stage to whatever you have in config.yaml to force downloading now.'
+    help='The stage we are calling download checkpoints from. Typically this would be in the build stage which is the default. Other options include "runtime" to be used in load_model or "upload" to be used during model upload. Set this stage to whatever you have in config.yaml to force downloading now.',
 )
 def download_checkpoints(model_path, out_path, stage):
+    """Download checkpoints from external source to local model_path
+
+    MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+    """
 
-    """
+    from clarifai.runners.models.model_builder import ModelBuilder
 
-    builder.download_checkpoints(stage=stage, checkpoint_path_override=out_path)
+    builder = ModelBuilder(model_path, download_validation_only=True)
+    builder.download_checkpoints(stage=stage, checkpoint_path_override=out_path)
 
 
 @model.command()
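The rewritten download_checkpoints body constructs a ModelBuilder in download-validation mode and delegates to its download_checkpoints method, which can also be called directly. A short sketch, assuming ./my_model has a checkpoints section in its config.yaml:

    from clarifai.runners.models.model_builder import ModelBuilder

    builder = ModelBuilder("./my_model", download_validation_only=True)
    # None keeps the default {model_path}/1/checkpoints destination described in the --out_path help.
    builder.download_checkpoints(stage="build", checkpoint_path_override=None)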
@@ -81,75 +96,82 @@ def download_checkpoints(model_path, out_path, stage):
     type=click.Path(exists=False),
     required=False,
     default=None,
-    help='Path to write the method signature defitions to. If not provided, use stdout.'
+    help='Path to write the method signature defitions to. If not provided, use stdout.',
+)
 def signatures(model_path, out_path):
+    """Generate method signatures for the model.
+
+    MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+    """
 
+    from clarifai.runners.models.model_builder import ModelBuilder
+
+    builder = ModelBuilder(model_path, download_validation_only=True)
+    signatures = builder.method_signatures_yaml()
+    if out_path:
+        with open(out_path, 'w') as f:
+            f.write(signatures)
+    else:
+        click.echo(signatures)
 
 
 @model.command()
-@click.
+@click.argument(
+    "model_path",
     type=click.Path(exists=True),
-    required=
+    required=False,
+    default=".",
+)
 @click.option(
     '--mode',
     type=click.Choice(['env', 'container'], case_sensitive=False),
     default='env',
     show_default=True,
-    help=
-    'Specify how to test the model locally: "env" for virtual environment or "container" for Docker container. Defaults to "env".'
+    help='Specify how to test the model locally: "env" for virtual environment or "container" for Docker container. Defaults to "env".',
 )
 @click.option(
     '--keep_env',
     is_flag=True,
-    help=
-    'Keep the virtual environment after testing the model locally (applicable for virtualenv mode). Defaults to False.'
+    help='Keep the virtual environment after testing the model locally (applicable for virtualenv mode). Defaults to False.',
 )
 @click.option(
     '--keep_image',
     is_flag=True,
-    help=
-    'Keep the Docker image after testing the model locally (applicable for container mode). Defaults to False.'
+    help='Keep the Docker image after testing the model locally (applicable for container mode). Defaults to False.',
 )
 @click.option(
     '--skip_dockerfile',
     is_flag=True,
-    help=
-    'Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile. Apply for `--mode conatainer`.',
+    help='Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile. Apply for `--mode conatainer`.',
 )
 def test_locally(model_path, keep_env=False, keep_image=False, mode='env', skip_dockerfile=False):
+    """Test model locally.
+
+    MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+    """
+    try:
+        from clarifai.runners.models import model_run_locally
+
+        if mode == 'env' and keep_image:
+            raise ValueError("'keep_image' is applicable only for 'container' mode")
+        if mode == 'container' and keep_env:
+            raise ValueError("'keep_env' is applicable only for 'env' mode")
+
+        if mode == "env":
+            click.echo("Testing model locally in a virtual environment...")
+            model_run_locally.main(model_path, run_model_server=False, keep_env=keep_env)
+        elif mode == "container":
+            click.echo("Testing model locally inside a container...")
+            model_run_locally.main(
+                model_path,
+                inside_container=True,
+                run_model_server=False,
+                keep_image=keep_image,
+                skip_dockerfile=skip_dockerfile,
+            )
+        click.echo("Model tested successfully.")
+    except Exception as e:
+        click.echo(f"Failed to test model locally: {e}", err=True)
 
 
 @model.command()
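The new signatures command is a small wrapper over ModelBuilder.method_signatures_yaml(), writing to --out_path or echoing to stdout. A minimal sketch of the same calls, assuming ./my_model and an output file name chosen only for illustration:

    from clarifai.runners.models.model_builder import ModelBuilder

    builder = ModelBuilder("./my_model", download_validation_only=True)
    yaml_text = builder.method_signatures_yaml()  # same call the CLI makes
    with open("signatures.yaml", "w") as f:  # the CLI default is stdout when --out_path is omitted
        f.write(yaml_text)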
@@ -165,57 +187,59 @@ def test_locally(model_path, keep_env=False, keep_image=False, mode='env', skip_
     type=int,
     default=8000,
     show_default=True,
-    help="The port to host the gRPC server for running the model locally. Defaults to 8000."
+    help="The port to host the gRPC server for running the model locally. Defaults to 8000.",
+)
 @click.option(
     '--mode',
     type=click.Choice(['env', 'container'], case_sensitive=False),
     default='env',
     show_default=True,
-    help=
-    'Specifies how to run the model: "env" for virtual environment or "container" for Docker container. Defaults to "env".'
+    help='Specifies how to run the model: "env" for virtual environment or "container" for Docker container. Defaults to "env".',
 )
 @click.option(
     '--keep_env',
     is_flag=True,
-    help=
-    'Keep the virtual environment after testing the model locally (applicable for virtualenv mode). Defaults to False.'
+    help='Keep the virtual environment after testing the model locally (applicable for virtualenv mode). Defaults to False.',
 )
 @click.option(
     '--keep_image',
     is_flag=True,
-    help=
-    'Keep the Docker image after testing the model locally (applicable for container mode). Defaults to False.'
+    help='Keep the Docker image after testing the model locally (applicable for container mode). Defaults to False.',
 )
 @click.option(
     '--skip_dockerfile',
     is_flag=True,
-    help=
-    'Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile. Apply for `--mode conatainer`.',
+    help='Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile. Apply for `--mode conatainer`.',
 )
 def run_locally(model_path, port, mode, keep_env, keep_image, skip_dockerfile=False):
+    """Run the model locally and start a gRPC server to serve the model.
+
+    MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+    """
+    try:
+        from clarifai.runners.models import model_run_locally
+
+        if mode == 'env' and keep_image:
+            raise ValueError("'keep_image' is applicable only for 'container' mode")
+        if mode == 'container' and keep_env:
+            raise ValueError("'keep_env' is applicable only for 'env' mode")
+
+        if mode == "env":
+            click.echo("Running model locally in a virtual environment...")
+            model_run_locally.main(model_path, run_model_server=True, keep_env=keep_env, port=port)
+        elif mode == "container":
+            click.echo("Running model locally inside a container...")
+            model_run_locally.main(
+                model_path,
+                inside_container=True,
+                run_model_server=True,
+                port=port,
+                keep_image=keep_image,
+                skip_dockerfile=skip_dockerfile,
+            )
+        click.echo(f"Model server started locally from {model_path} in {mode} mode.")
+    except Exception as e:
+        click.echo(f"Failed to starts model server locally: {e}", err=True)
 
 
 @model.command()
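run_locally delegates to model_run_locally.main with run_model_server=True, in either a virtual environment or a container. A sketch of the env-mode call; the path is an assumption and the port mirrors the command's default:

    from clarifai.runners.models import model_run_locally

    # Same call the CLI issues for --mode env; serves the model over gRPC on the given port.
    model_run_locally.main("./my_model", run_model_server=True, keep_env=False, port=8000)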
@@ -225,13 +249,324 @@ def run_locally(model_path, port, mode, keep_env, keep_image, skip_dockerfile=Fa
     required=False,
     default=".",
 )
+@click.pass_context
+def local_dev(ctx, model_path):
+    """Run the model as a local dev runner to help debug your model connected to the API or to
+    leverage local compute resources manually. This relies on many variables being present in the env
+    of the currently selected context. If they are not present then default values will be used to
+    ease the setup of a local dev runner and your context yaml will be updated in place. The required
+    env vars are:
+
+    \b
+    CLARIFAI_PAT:
+
+    \b
+    # for where the model that represents the local runner should be:
+    \b
+    CLARIFAI_USER_ID:
+    CLARIFAI_APP_ID:
+    CLARIFAI_MODEL_ID:
+
+    \b
+    # for where the local dev runner should be in a compute cluser
+    # note the user_id of the compute cluster is the same as the user_id of the model.
+
+    \b
+    CLARIFAI_COMPUTE_CLUSTER_ID:
+    CLARIFAI_NODEPOOL_ID:
+
+    # The following will be created in your context since it's generated by the API
+
+    CLARIFAI_RUNNER_ID:
+
+
+    Additionally using the provided model path, if the config.yaml file does not contain the model
+    information that matches the above CLARIFAI_USER_ID, CLARIFAI_APP_ID, CLARIFAI_MODEL_ID then the
+    config.yaml will be updated to include the model information. This is to ensure that the model
+    that starts up in the local dev runner is the same as the one you intend to call in the API.
+
+    MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+    """
+    from clarifai.client.user import User
+    from clarifai.runners.models.model_builder import ModelBuilder
+    from clarifai.runners.server import serve
+
+    validate_context(ctx)
+    logger.info("Checking setup for local development runner...")
+    logger.info(f"Current context: {ctx.obj.current.name}")
+    user_id = ctx.obj.current.user_id
+    user = User(user_id=user_id, pat=ctx.obj.current.pat, base_url=ctx.obj.current.api_base)
+    logger.info(f"Current user_id: {user_id}")
+    logger.debug("Checking if a local dev compute cluster exists...")
+
+    # see if ctx has CLARIFAI_COMPUTE_CLUSTER_ID, if not use default
+    try:
+        compute_cluster_id = ctx.obj.current.compute_cluster_id
+    except AttributeError:
+        compute_cluster_id = DEFAULT_LOCAL_DEV_COMPUTE_CLUSTER_ID
+    logger.info(f"Current compute_cluster_id: {compute_cluster_id}")
+
+    try:
+        compute_cluster = user.compute_cluster(compute_cluster_id)
+        if compute_cluster.cluster_type != 'local-dev':
+            raise ValueError(
+                f"Compute cluster {user_id}/{compute_cluster_id} is not a local-dev compute cluster. Please create a local-dev compute cluster."
+            )
+        try:
+            compute_cluster_id = ctx.obj.current.compute_cluster_id
+        except AttributeError:  # doesn't exist in context but does in API then update the context.
+            ctx.obj.current.CLARIFAI_COMPUTE_CLUSTER_ID = compute_cluster.id
+            ctx.obj.to_yaml()  # save to yaml file.
+    except ValueError:
+        raise
+    except Exception as e:
+        logger.info(f"Failed to get compute cluster with ID {compute_cluster_id}: {e}")
+        y = input(
+            f"Compute cluster not found. Do you want to create a new compute cluster {user_id}/{compute_cluster_id}? (y/n): "
+        )
+        if y.lower() != 'y':
+            raise click.Abort()
+        # Create a compute cluster with default configuration for local dev.
+        compute_cluster = user.create_compute_cluster(
+            compute_cluster_id=compute_cluster_id,
+            compute_cluster_config=DEFAULT_LOCAL_DEV_COMPUTE_CLUSTER_CONFIG,
+        )
+        ctx.obj.current.CLARIFAI_COMPUTE_CLUSTER_ID = compute_cluster_id
+        ctx.obj.to_yaml()  # save to yaml file.
+
+    # Now check if there is a nodepool created in this compute cluser
+    try:
+        nodepool_id = ctx.obj.current.nodepool_id
+    except AttributeError:
+        nodepool_id = DEFAULT_LOCAL_DEV_NODEPOOL_ID
+    logger.info(f"Current nodepool_id: {nodepool_id}")
+
+    try:
+        nodepool = compute_cluster.nodepool(nodepool_id)
+        try:
+            nodepool_id = ctx.obj.current.nodepool_id
+        except AttributeError:  # doesn't exist in context but does in API then update the context.
+            ctx.obj.current.CLARIFAI_NODEPOOL_ID = nodepool.id
+            ctx.obj.to_yaml()  # save to yaml file.
+    except Exception as e:
+        logger.info(f"Failed to get nodepool with ID {nodepool_id}: {e}")
+        y = input(
+            f"Nodepool not found. Do you want to create a new nodepool {user_id}/{compute_cluster_id}/{nodepool_id}? (y/n): "
+        )
+        if y.lower() != 'y':
+            raise click.Abort()
+        nodepool = compute_cluster.create_nodepool(
+            nodepool_config=DEFAULT_LOCAL_DEV_NODEPOOL_CONFIG, nodepool_id=nodepool_id
+        )
+        ctx.obj.current.CLARIFAI_NODEPOOL_ID = nodepool_id
+        ctx.obj.to_yaml()  # save to yaml file.
+
+    logger.debug("Checking if model is created to call for local development...")
+    # see if ctx has CLARIFAI_APP_ID, if not use default
+    try:
+        app_id = ctx.obj.current.app_id
+    except AttributeError:
+        app_id = DEFAULT_LOCAL_DEV_APP_ID
+    logger.info(f"Current app_id: {app_id}")
+
+    try:
+        app = user.app(app_id)
+        try:
+            app_id = ctx.obj.current.app_id
+        except AttributeError:  # doesn't exist in context but does in API then update the context.
+            ctx.obj.current.CLARIFAI_APP_ID = app.id
+            ctx.obj.to_yaml()  # save to yaml file.
+    except Exception as e:
+        logger.info(f"Failed to get app with ID {app_id}: {e}")
+        y = input(f"App not found. Do you want to create a new app {user_id}/{app_id}? (y/n): ")
+        if y.lower() != 'y':
+            raise click.Abort()
+        app = user.create_app(app_id)
+        ctx.obj.current.CLARIFAI_APP_ID = app_id
+        ctx.obj.to_yaml()  # save to yaml file.
+
+    # Within this app we now need a model to call as the local dev runner.
+    try:
+        model_id = ctx.obj.current.model_id
+    except AttributeError:
+        model_id = DEFAULT_LOCAL_DEV_MODEL_ID
+    logger.info(f"Current model_id: {model_id}")
 
+    try:
+        model = app.model(model_id)
+        try:
+            model_id = ctx.obj.current.model_id
+        except AttributeError:  # doesn't exist in context but does in API then update the context.
+            ctx.obj.current.CLARIFAI_MODEL_ID = model.id
+            ctx.obj.to_yaml()  # save to yaml file.
+    except Exception as e:
+        logger.info(f"Failed to get model with ID {model_id}: {e}")
+        y = input(
+            f"Model not found. Do you want to create a new model {user_id}/{app_id}/models/{model_id}? (y/n): "
+        )
+        if y.lower() != 'y':
+            raise click.Abort()
+        model = app.create_model(model_id, model_type_id=DEFAULT_LOCAL_DEV_MODEL_TYPE)
+        ctx.obj.current.CLARIFAI_MODEL_ID = model_id
+        ctx.obj.to_yaml()  # save to yaml file.
+
+    # Now we need to create a version for the model if no version exists. Only need one version that
+    # mentions it's a local dev runner.
+    model_versions = [v for v in model.list_versions()]
+    if len(model_versions) == 0:
+        logger.info("No model versions found. Creating a new version for local dev runner.")
+        version = model.create_version(pretrained_model_config={"local_dev": True}).model_version
+        logger.info(f"Created model version {version.id}")
+    else:
+        version = model_versions[0].model_version
+
+    logger.info(f"Current model version {version.id}")
+
+    worker = {
+        "model": {
+            "id": f"{model.id}",
+            "model_version": {
+                "id": f"{version.id}",
+            },
+            "user_id": f"{user_id}",
+            "app_id": f"{app_id}",
+        },
+    }
+
+    try:
+        # if it's already in our context then we'll re-use the same one.
+        # note these are UUIDs, we cannot provide a runner ID.
+        runner_id = ctx.obj.current.runner_id
+
+        try:
+            runner = nodepool.runner(runner_id)
+        except Exception as e:
+            raise AttributeError("Runner not found in nodepool.") from e
+    except AttributeError:
+        logger.info(
+            f"Create the local dev runner tying this\n {user_id}/{app_id}/models/{model.id} model (version: {version.id}) to the\n {user_id}/{compute_cluster_id}/{nodepool_id} nodepool."
+        )
+        runner = nodepool.create_runner(
+            runner_config={
+                "runner": {
+                    "description": "Local dev runner for model testing",
+                    "worker": worker,
+                    "num_replicas": 1,
+                }
+            }
+        )
+        runner_id = runner.id
+        ctx.obj.current.CLARIFAI_RUNNER_ID = runner.id
+        ctx.obj.to_yaml()
+
+    logger.info(f"Current runner_id: {runner_id}")
+
+    # To make it easier to call the model without specifying a runner selector
+    # we will also create a deployment tying the model to the nodepool.
+    try:
+        deployment_id = ctx.obj.current.deployment_id
+    except AttributeError:
+        deployment_id = DEFAULT_LOCAL_DEV_DEPLOYMENT_ID
+    try:
+        deployment = nodepool.deployment(deployment_id)
+        try:
+            deployment_id = ctx.obj.current.deployment_id
+        except AttributeError:  # doesn't exist in context but does in API then update the context.
+            ctx.obj.current.CLARIFAI_DEPLOYMENT_ID = deployment.id
+            ctx.obj.to_yaml()  # save to yaml file.
+    except Exception as e:
+        logger.info(f"Failed to get deployment with ID {deployment_id}: {e}")
+        y = input(
+            f"Deployment not found. Do you want to create a new deployment {user_id}/{compute_cluster_id}/{nodepool_id}/{deployment_id}? (y/n): "
+        )
+        if y.lower() != 'y':
+            raise click.Abort()
+        nodepool.create_deployment(
+            deployment_id=deployment_id,
+            deployment_config={
+                "deployment": {
+                    "scheduling_choice": 3,  # 3 means by price
+                    "worker": worker,
+                    "nodepools": [
+                        {
+                            "id": f"{nodepool_id}",
+                            "compute_cluster": {
+                                "id": f"{compute_cluster_id}",
+                                "user_id": f"{user_id}",
+                            },
+                        }
+                    ],
+                }
+            },
+        )
+        ctx.obj.current.CLARIFAI_DEPLOYMENT_ID = deployment_id
+        ctx.obj.to_yaml()  # save to yaml file.
+
+    logger.info(f"Current deployment_id: {deployment_id}")
+
+    # Now that we have all the context in ctx.obj, we need to update the config.yaml in
+    # the model_path directory with the model object containing user_id, app_id, model_id, version_id
+    config_file = os.path.join(model_path, 'config.yaml')
+    if not os.path.exists(config_file):
+        raise ValueError(
+            f"config.yaml not found in {model_path}. Please ensure you are passing the correct directory."
+        )
+    config = ModelBuilder._load_config(config_file)
+    # The config.yaml doens't match what we created above.
+    if 'model' in config and model_id != config['model'].get('id'):
+        logger.info(f"Current model section of config.yaml: {config.get('model', {})}")
+        y = input(
+            "Do you want to backup config.yaml to config.yaml.bk then update the config.yaml with the new model information? (y/n): "
+        )
+        if y.lower() != 'y':
+            raise click.Abort()
+        config = ModelBuilder._set_local_dev_model(
+            config, user_id, app_id, model_id, DEFAULT_LOCAL_DEV_MODEL_TYPE
+        )
+        ModelBuilder._backup_config(config_file)
+        ModelBuilder._save_config(config_file, config)
+
+    # client_model = Model(
+    # TODO: once we can generate_client_script from ModelBuilder or similar
+    # we should be able to put the exact function call in place.
+    # model_script = model.generate_client_script()
+
+    builder = ModelBuilder(model_path, download_validation_only=True)
+    method_signatures = builder.get_method_signatures()
+
+    from clarifai.runners.utils import code_script
+
+    snippet = code_script.generate_client_script(
+        method_signatures,
+        user_id=user_id,
+        app_id=app_id,
+        model_id=model_id,
+        deployment_id=deployment_id,
+        use_ctx=True,
+        base_url=ctx.obj.current.api_base,
+    )
+
+    # TODO: put in the ClarifaiUrlHelper to create the model url.
+
+    logger.info("""\n
+XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
+# About to start up the local dev runner in this terminal...
+# Here is a code snippet to call this model once it start from another terminal:
+""")
+    logger.info(snippet)
+
+    logger.info("Now starting the local dev runner...")
+
+    # This reads the config.yaml from the model_path so we alter it above first.
+    serve(
+        model_path,
+        user_id=user_id,
+        compute_cluster_id=compute_cluster_id,
+        nodepool_id=nodepool_id,
+        runner_id=runner_id,
+        base_url=ctx.obj.current.api_base,
+        pat=ctx.obj.current.pat,
+    )
 
 
 @model.command()
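Throughout the new local_dev command, each resource ID follows the same pattern: read it from the currently selected context, fall back to a DEFAULT_LOCAL_DEV_* constant, then write the resolved value back into the context YAML. A small illustrative helper capturing that pattern; it is not part of the package (local_dev inlines this logic per resource), and the attribute and key names are only examples:

    # Hypothetical helper, for illustration only.
    def resolve_from_context(ctx, attr, context_key, default):
        try:
            value = getattr(ctx.obj.current, attr)  # e.g. ctx.obj.current.nodepool_id
        except AttributeError:
            value = default  # e.g. DEFAULT_LOCAL_DEV_NODEPOOL_ID
        setattr(ctx.obj.current, context_key, value)  # e.g. CLARIFAI_NODEPOOL_ID
        ctx.obj.to_yaml()  # persist the updated context, as the command does
        return value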
@@ -239,7 +574,8 @@ def local_dev(model_path):
     '--config',
     type=click.Path(exists=True),
     required=False,
-    help='Path to the model predict config file.'
+    help='Path to the model predict config file.',
+)
 @click.option('--model_id', required=False, help='Model ID of the model used to predict.')
 @click.option('--user_id', required=False, help='User ID of the model used to predict.')
 @click.option('--app_id', required=False, help='App ID of the model used to predict.')
@@ -252,85 +588,147 @@ def local_dev(model_path):
     '-cc_id',
     '--compute_cluster_id',
     required=False,
-    help='Compute Cluster ID to use for the model'
+    help='Compute Cluster ID to use for the model',
+)
 @click.option('-np_id', '--nodepool_id', required=False, help='Nodepool ID to use for the model')
 @click.option(
-    '-dpl_id', '--deployment_id', required=False, help='Deployment ID to use for the model'
+    '-dpl_id', '--deployment_id', required=False, help='Deployment ID to use for the model'
+)
 @click.option(
-    '--inference_params', required=False, default='{}', help='Inference parameters to override'
+    '--inference_params', required=False, default='{}', help='Inference parameters to override'
+)
 @click.option('--output_config', required=False, default='{}', help='Output config to override')
 @click.pass_context
-def predict(
-if
+def predict(
+    ctx,
+    config,
+    model_id,
+    user_id,
+    app_id,
+    model_url,
+    file_path,
+    url,
+    bytes,
+    input_type,
+    compute_cluster_id,
+    nodepool_id,
+    deployment_id,
+    inference_params,
+    output_config,
+):
+    """Predict using the given model"""
+    import json
+
+    from clarifai.client.model import Model
+    from clarifai.utils.cli import from_yaml, validate_context
+
+    validate_context(ctx)
+    if config:
+        config = from_yaml(config)
+        (
+            model_id,
+            user_id,
+            app_id,
+            model_url,
+            file_path,
+            url,
+            bytes,
+            input_type,
+            compute_cluster_id,
+            nodepool_id,
+            deployment_id,
+            inference_params,
+            output_config,
+        ) = (
+            config.get(k, v)
+            for k, v in [
+                ('model_id', model_id),
+                ('user_id', user_id),
+                ('app_id', app_id),
+                ('model_url', model_url),
+                ('file_path', file_path),
+                ('url', url),
+                ('bytes', bytes),
+                ('input_type', input_type),
+                ('compute_cluster_id', compute_cluster_id),
+                ('nodepool_id', nodepool_id),
+                ('deployment_id', deployment_id),
+                ('inference_params', inference_params),
+                ('output_config', output_config),
+            ]
+        )
+    if (
+        sum(
+            [
+                opt[1]
+                for opt in [(model_id, 1), (user_id, 1), (app_id, 1), (model_url, 3)]
+                if opt[0]
+            ]
+        )
+        != 3
+    ):
+        raise ValueError(
+            "Either --model_id & --user_id & --app_id or --model_url must be provided."
+        )
+    if compute_cluster_id or nodepool_id or deployment_id:
+        if (
+            sum(
+                [
+                    opt[1]
+                    for opt in [(compute_cluster_id, 0.5), (nodepool_id, 0.5), (deployment_id, 1)]
+                    if opt[0]
+                ]
+            )
+            != 1
+        ):
+            raise ValueError(
+                "Either --compute_cluster_id & --nodepool_id or --deployment_id must be provided."
+            )
+    if model_url:
+        model = Model(url=model_url, pat=ctx.obj['pat'], base_url=ctx.obj['base_url'])
+    else:
+        model = Model(
+            model_id=model_id,
+            user_id=user_id,
+            app_id=app_id,
+            pat=ctx.obj['pat'],
+            base_url=ctx.obj['base_url'],
+        )
+
+    if inference_params:
+        inference_params = json.loads(inference_params)
+    if output_config:
+        output_config = json.loads(output_config)
+
+    if file_path:
+        model_prediction = model.predict_by_filepath(
+            filepath=file_path,
+            input_type=input_type,
+            compute_cluster_id=compute_cluster_id,
+            nodepool_id=nodepool_id,
+            deployment_id=deployment_id,
+            inference_params=inference_params,
+            output_config=output_config,
+        )
+    elif url:
+        model_prediction = model.predict_by_url(
+            url=url,
+            input_type=input_type,
+            compute_cluster_id=compute_cluster_id,
+            nodepool_id=nodepool_id,
+            deployment_id=deployment_id,
+            inference_params=inference_params,
+            output_config=output_config,
+        )
+    elif bytes:
+        bytes = str.encode(bytes)
+        model_prediction = model.predict_by_bytes(
+            input_bytes=bytes,
+            input_type=input_type,
+            compute_cluster_id=compute_cluster_id,
+            nodepool_id=nodepool_id,
+            deployment_id=deployment_id,
+            inference_params=inference_params,
+            output_config=output_config,
+        )  ## TO DO: Add support for input_id
+    click.echo(model_prediction)