clarifai 11.6.4rc1__py3-none-any.whl → 11.6.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/cli/README.md +39 -0
- clarifai/cli/base.py +107 -118
- clarifai/cli/model.py +62 -79
- clarifai/client/app.py +1 -1
- clarifai/client/auth/helper.py +7 -2
- clarifai/client/base.py +35 -8
- clarifai/client/compute_cluster.py +1 -1
- clarifai/client/model.py +3 -1
- clarifai/client/nodepool.py +4 -2
- clarifai/client/user.py +4 -2
- clarifai/runners/models/model_runner.py +55 -0
- clarifai/runners/server.py +1 -0
- clarifai/runners/utils/code_script.py +52 -46
- clarifai/runners/utils/url_fetcher.py +51 -12
- clarifai/utils/cli.py +127 -0
- clarifai/utils/constants.py +5 -0
- clarifai/utils/misc.py +49 -1
- {clarifai-11.6.4rc1.dist-info → clarifai-11.6.5.dist-info}/METADATA +2 -2
- clarifai-11.6.5.dist-info/RECORD +127 -0
- {clarifai-11.6.4rc1.dist-info → clarifai-11.6.5.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-311.pyc +0 -0
- clarifai/__pycache__/errors.cpython-39.pyc +0 -0
- clarifai/__pycache__/versions.cpython-311.pyc +0 -0
- clarifai/__pycache__/versions.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-39.pyc +0 -0
- clarifai/cli/model_templates.py +0 -243
- clarifai/cli/pipeline_step_templates.py +0 -64
- clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/model_client.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model_client.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/runner.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-39.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-39.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-39.pyc +0 -0
- clarifai/models/model_serving/README.md +0 -158
- clarifai/models/model_serving/__init__.py +0 -14
- clarifai/models/model_serving/cli/__init__.py +0 -12
- clarifai/models/model_serving/cli/_utils.py +0 -53
- clarifai/models/model_serving/cli/base.py +0 -14
- clarifai/models/model_serving/cli/build.py +0 -79
- clarifai/models/model_serving/cli/clarifai_clis.py +0 -33
- clarifai/models/model_serving/cli/create.py +0 -171
- clarifai/models/model_serving/cli/example_cli.py +0 -34
- clarifai/models/model_serving/cli/login.py +0 -26
- clarifai/models/model_serving/cli/upload.py +0 -179
- clarifai/models/model_serving/constants.py +0 -21
- clarifai/models/model_serving/docs/cli.md +0 -161
- clarifai/models/model_serving/docs/concepts.md +0 -229
- clarifai/models/model_serving/docs/dependencies.md +0 -11
- clarifai/models/model_serving/docs/inference_parameters.md +0 -139
- clarifai/models/model_serving/docs/model_types.md +0 -19
- clarifai/models/model_serving/model_config/__init__.py +0 -16
- clarifai/models/model_serving/model_config/base.py +0 -369
- clarifai/models/model_serving/model_config/config.py +0 -312
- clarifai/models/model_serving/model_config/inference_parameter.py +0 -129
- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -25
- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -20
- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -22
- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -32
- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -19
- clarifai/models/model_serving/model_config/output.py +0 -133
- clarifai/models/model_serving/model_config/triton/__init__.py +0 -14
- clarifai/models/model_serving/model_config/triton/serializer.py +0 -136
- clarifai/models/model_serving/model_config/triton/triton_config.py +0 -182
- clarifai/models/model_serving/model_config/triton/wrappers.py +0 -281
- clarifai/models/model_serving/repo_build/__init__.py +0 -14
- clarifai/models/model_serving/repo_build/build.py +0 -198
- clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -2
- clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -169
- clarifai/models/model_serving/repo_build/static_files/inference.py +0 -26
- clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -25
- clarifai/models/model_serving/repo_build/static_files/test.py +0 -40
- clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -75
- clarifai/models/model_serving/utils.py +0 -23
- clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/mcp_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/openai_class.cpython-311.pyc +0 -0
- clarifai/runners/models/base_typed_model.py +0 -238
- clarifai/runners/models/model_upload.py +0 -607
- clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/code_script.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/code_script.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/method_signatures.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/model_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/openai_convertor.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/serializers.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
- clarifai/runners/utils/data_handler.py +0 -231
- clarifai/runners/utils/data_types/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/data_types/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_types/__pycache__/data_types.cpython-311.pyc +0 -0
- clarifai/runners/utils/data_types/__pycache__/data_types.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_types.py +0 -471
- clarifai/runners/utils/temp.py +0 -59
- clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
- clarifai-11.6.4rc1.dist-info/RECORD +0 -301
- {clarifai-11.6.4rc1.dist-info → clarifai-11.6.5.dist-info}/entry_points.txt +0 -0
- {clarifai-11.6.4rc1.dist-info → clarifai-11.6.5.dist-info}/licenses/LICENSE +0 -0
- {clarifai-11.6.4rc1.dist-info → clarifai-11.6.5.dist-info}/top_level.txt +0 -0
clarifai/client/auth/helper.py
CHANGED
@@ -2,6 +2,7 @@ import os
 import urllib.request
 from typing import Any, Dict
 
+import grpc
 from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
 from clarifai_grpc.grpc.api import resources_pb2, service_pb2_grpc
 
@@ -289,7 +290,11 @@ class ClarifaiAuthHelper:
         )
 
     def get_stub(self) -> service_pb2_grpc.V2Stub:
-
+        stub, channel = self.get_stub_and_channel()
+        return stub
+
+    def get_stub_and_channel(self) -> tuple[service_pb2_grpc.V2Stub, grpc.Channel]:
+        """Get the API gRPC stub and channel based on the API endpoint base.
 
         Returns:
           stub: The service_pb2_grpc.V2Stub stub for the API.
@@ -310,7 +315,7 @@ class ClarifaiAuthHelper:
             port = 80
             channel = ClarifaiChannel.get_insecure_grpc_channel(base=host, port=port)
         stub = service_pb2_grpc.V2Stub(channel)
-        return stub
+        return stub, channel
 
     def get_async_stub(self) -> service_pb2_grpc.V2Stub:
         """Get the API gRPC async stub using the right channel based on the API endpoint base.
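The change above splits stub creation so that the new `get_stub_and_channel()` also returns the underlying `grpc.Channel`, letting callers manage the channel lifetime explicitly. A minimal usage sketch (assumes a valid `CLARIFAI_PAT` in the environment; the user and app IDs are placeholders):

```python
import os

from clarifai.client.auth.helper import ClarifaiAuthHelper

# Placeholder IDs; substitute your own user and app.
helper = ClarifaiAuthHelper(user_id="me", app_id="my-app", pat=os.environ["CLARIFAI_PAT"])

stub, channel = helper.get_stub_and_channel()
try:
    pass  # issue V2Stub RPCs through `stub` here
finally:
    channel.close()  # explicit cleanup is possible now that the channel is exposed
```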
clarifai/client/base.py
CHANGED
@@ -11,7 +11,7 @@ from clarifai.client.auth.helper import ClarifaiAuthHelper
 from clarifai.constants.base import COMPUTE_ORCHESTRATION_RESOURCES
 from clarifai.errors import ApiError, UserError
 from clarifai.utils.constants import CLARIFAI_PAT_ENV_VAR, CLARIFAI_SESSION_TOKEN_ENV_VAR
-from clarifai.utils.misc import
+from clarifai.utils.misc import get_from_dict_env_or_config
 
 
 class BaseClient:
@@ -39,15 +39,42 @@ class BaseClient:
     def __init__(self, **kwargs):
         token, pat = "", ""
         try:
-            pat =
+            pat = get_from_dict_env_or_config(key="pat", env_key=CLARIFAI_PAT_ENV_VAR, **kwargs)
         except UserError:
-
-
-
+            try:
+                token = get_from_dict_env_or_config(
+                    key="token", env_key=CLARIFAI_SESSION_TOKEN_ENV_VAR, **kwargs
+                )
+            except UserError:
+                pass
         finally:
-
-
-
+            if not (token or pat):
+                raise UserError(
+                    "Authentication Required. Please authenticate in one of the following ways:\n\n"
+                    "- Pass your Personal Access Token ('pat') or session token ('token') as arguments to your function.\n"
+                    "- Set the CLARIFAI_PAT or CLARIFAI_SESSION_TOKEN environment variables in your environment.\n"
+                    "- Run `clarifai login` in your terminal to configure CLI authentication."
+                )
+
+        # Also try to get user_id and base from CLI config if not provided
+        if not kwargs.get('user_id'):
+            try:
+                user_id = get_from_dict_env_or_config(
+                    key="user_id", env_key="CLARIFAI_USER_ID", **kwargs
+                )
+                kwargs['user_id'] = user_id
+            except UserError:
+                pass  # user_id is optional for some use cases
+
+        if not kwargs.get('base'):
+            try:
+                base = get_from_dict_env_or_config(
+                    key="base", env_key="CLARIFAI_API_BASE", **kwargs
+                )
+                kwargs['base'] = base
+            except UserError:
+                pass  # base has a default value
+
         kwargs.update({'token': token, 'pat': pat})
 
         self.auth_helper = ClarifaiAuthHelper(**kwargs, validate=False)
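With this change, `BaseClient` resolves credentials in order: explicit kwargs, then environment variables, then the config written by `clarifai login`, and it now also pulls `user_id` and `base` from the environment or CLI config when they are not passed. A hedged sketch of what that means for callers (`User` is an existing client class; the IDs and PAT below are placeholders):

```python
import os

from clarifai.client.user import User

# Highest precedence: pass the PAT explicitly.
user = User(user_id="me", pat="MY_PAT")  # placeholder values

# Fallback: environment variables (CLARIFAI_PAT or CLARIFAI_SESSION_TOKEN,
# plus CLARIFAI_USER_ID / CLARIFAI_API_BASE for the new optional lookups).
os.environ["CLARIFAI_PAT"] = "MY_PAT"  # normally set outside the script
user = User(user_id="me")

# Last resort: values saved by `clarifai login`; if nothing is found anywhere,
# a UserError is raised listing these three options.
```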
clarifai/client/compute_cluster.py
CHANGED
@@ -165,7 +165,7 @@ class ComputeCluster(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostNodepools, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info("
+        self.logger.info(f"Nodepool with ID '{nodepool_id}' is created:\n{response.status}")
 
         dict_response = MessageToDict(response.nodepools[0], preserving_proto_field_name=True)
         kwargs = self.process_response_keys(dict_response, 'nodepool')
clarifai/client/model.py
CHANGED
@@ -445,7 +445,9 @@ class Model(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostModelVersions, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info(
+        self.logger.info(
+            f"Model Version with ID '{response.model.model_version.id}' is created:\n{response.status}"
+        )
 
         kwargs.update({'app_id': self.app_id, 'user_id': self.user_id})
         dict_response = MessageToDict(response, preserving_proto_field_name=True)
clarifai/client/nodepool.py
CHANGED
@@ -195,7 +195,9 @@ class Nodepool(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostDeployments, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info(
+        self.logger.info(
+            f"Deployment with ID '{response.deployments[0].id}' is created:\n{response.status}"
+        )
 
         dict_response = MessageToDict(
             response.deployments[0], preserving_proto_field_name=True, use_integers_for_enums=True
@@ -324,7 +326,7 @@ class Nodepool(Lister, BaseClient):
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
         self.logger.info(
-            "
+            f"Runner with ID '{response.runners[0].id}' is created:\n{response.status}"
         )
 
         dict_response = MessageToDict(response.runners[0], preserving_proto_field_name=True)
clarifai/client/user.py
CHANGED
@@ -177,7 +177,7 @@ class User(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostApps, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info("
+        self.logger.info(f"App with ID '{app_id}' is created:\n{response.status}")
         return App.from_auth_helper(auth=self.auth_helper, app_id=app_id)
 
     def _process_compute_cluster_config(
@@ -255,7 +255,9 @@
         response = self._grpc_request(self.STUB.PostComputeClusters, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info(
+        self.logger.info(
+            f"Compute Cluster with ID '{compute_cluster_id}' is created:\n{response.status}"
+        )
         return ComputeCluster.from_auth_helper(
             auth=self.auth_helper, compute_cluster_id=compute_cluster_id
         )
clarifai/runners/models/model_runner.py
CHANGED
@@ -1,3 +1,4 @@
+import time
 from typing import Iterator
 
 from clarifai_grpc.grpc.api import service_pb2
@@ -6,6 +7,8 @@ from clarifai_protocol import BaseRunner
 from clarifai_protocol.utils.health import HealthProbeRequestHandler
 
 from clarifai.client.auth.helper import ClarifaiAuthHelper
+from clarifai.utils.constants import STATUS_FAIL, STATUS_MIXED, STATUS_OK, STATUS_UNKNOWN
+from clarifai.utils.logging import get_req_id_from_context, logger
 
 from ..utils.url_fetcher import ensure_urls_downloaded
 from .model_class import ModelClass
@@ -106,6 +109,20 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
             raise Exception("Unexpected work item type: {}".format(runner_item))
         request = runner_item.post_model_outputs_request
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
+        start_time = time.time()
+        req_id = get_req_id_from_context()
+        status_str = STATUS_UNKNOWN
+        # Endpoint is always POST /v2/.../outputs for this runner
+        endpoint = "POST /v2/.../outputs "
+
+        # if method_name == '_GET_SIGNATURES' then the request is for getting signatures and we don't want to log it.
+        # This is a workaround to avoid logging the _GET_SIGNATURES method call.
+        method_name = None
+        logging = True
+        if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
+            method_name = request.inputs[0].data.metadata['_method_name']
+        if method_name == '_GET_SIGNATURES':
+            logging = False
 
         resp = self.model.predict_wrapper(request)
         # if we have any non-successful code already it's an error we can return.
@@ -113,6 +130,9 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
             resp.status.code != status_code_pb2.SUCCESS
             and resp.status.code != status_code_pb2.ZERO
         ):
+            status_str = f"{resp.status.code} ERROR"
+            duration_ms = (time.time() - start_time) * 1000
+            logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
             return service_pb2.RunnerItemOutput(multi_output_response=resp)
         successes = []
         for output in resp.outputs:
@@ -126,18 +146,24 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
                 code=status_code_pb2.SUCCESS,
                 description="Success",
             )
+            status_str = STATUS_OK
         elif any(successes):
             status = status_pb2.Status(
                 code=status_code_pb2.MIXED_STATUS,
                 description="Mixed Status",
             )
+            status_str = STATUS_MIXED
         else:
             status = status_pb2.Status(
                 code=status_code_pb2.FAILURE,
                 description="Failed",
            )
+            status_str = STATUS_FAIL
 
         resp.status.CopyFrom(status)
+        if logging:
+            duration_ms = (time.time() - start_time) * 1000
+            logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
         return service_pb2.RunnerItemOutput(multi_output_response=resp)
 
     def runner_item_generate(
@@ -150,12 +176,21 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
         request = runner_item.post_model_outputs_request
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
 
+        # --- Live logging additions ---
+        start_time = time.time()
+        req_id = get_req_id_from_context()
+        status_str = STATUS_UNKNOWN
+        endpoint = "POST /v2/.../outputs/generate"
+
         for resp in self.model.generate_wrapper(request):
             # if we have any non-successful code already it's an error we can return.
             if (
                 resp.status.code != status_code_pb2.SUCCESS
                 and resp.status.code != status_code_pb2.ZERO
             ):
+                status_str = f"{resp.status.code} ERROR"
+                duration_ms = (time.time() - start_time) * 1000
+                logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
                 yield service_pb2.RunnerItemOutput(multi_output_response=resp)
                 continue
             successes = []
@@ -170,30 +205,44 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
                     code=status_code_pb2.SUCCESS,
                     description="Success",
                 )
+                status_str = STATUS_OK
             elif any(successes):
                 status = status_pb2.Status(
                     code=status_code_pb2.MIXED_STATUS,
                     description="Mixed Status",
                 )
+                status_str = STATUS_MIXED
             else:
                 status = status_pb2.Status(
                     code=status_code_pb2.FAILURE,
                     description="Failed",
                 )
+                status_str = STATUS_FAIL
             resp.status.CopyFrom(status)
 
             yield service_pb2.RunnerItemOutput(multi_output_response=resp)
 
+        duration_ms = (time.time() - start_time) * 1000
+        logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
+
     def runner_item_stream(
         self, runner_item_iterator: Iterator[service_pb2.RunnerItem]
     ) -> Iterator[service_pb2.RunnerItemOutput]:
         # Call the generate() method the underlying model implements.
+        start_time = time.time()
+        req_id = get_req_id_from_context()
+        status_str = STATUS_UNKNOWN
+        endpoint = "POST /v2/.../outputs/stream "
+
         for resp in self.model.stream_wrapper(pmo_iterator(runner_item_iterator)):
             # if we have any non-successful code already it's an error we can return.
             if (
                 resp.status.code != status_code_pb2.SUCCESS
                 and resp.status.code != status_code_pb2.ZERO
             ):
+                status_str = f"{resp.status.code} ERROR"
+                duration_ms = (time.time() - start_time) * 1000
+                logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
                 yield service_pb2.RunnerItemOutput(multi_output_response=resp)
                 continue
             successes = []
@@ -208,20 +257,26 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
                     code=status_code_pb2.SUCCESS,
                     description="Success",
                 )
+                status_str = STATUS_OK
             elif any(successes):
                 status = status_pb2.Status(
                     code=status_code_pb2.MIXED_STATUS,
                     description="Mixed Status",
                 )
+                status_str = STATUS_MIXED
             else:
                 status = status_pb2.Status(
                     code=status_code_pb2.FAILURE,
                     description="Failed",
                 )
+                status_str = STATUS_FAIL
             resp.status.CopyFrom(status)
 
             yield service_pb2.RunnerItemOutput(multi_output_response=resp)
 
+        duration_ms = (time.time() - start_time) * 1000
+        logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
+
 
 def pmo_iterator(runner_item_iterator, auth_helper=None):
     for runner_item in runner_item_iterator:
clarifai/runners/server.py
CHANGED
clarifai/runners/utils/code_script.py
CHANGED
@@ -44,62 +44,66 @@ def generate_client_script(
             model_id,
         )
 
-        _CLIENT_TEMPLATE = """
+        _CLIENT_TEMPLATE = f"""
 import asyncio
 import os
+
 from fastmcp import Client
 from fastmcp.client.transports import StreamableHttpTransport
 
-transport = StreamableHttpTransport(
-
+transport = StreamableHttpTransport(
+    url="{mcp_url}",
+    headers={{"Authorization": "Bearer " + os.environ["CLARIFAI_PAT"]}},
+)
 
 async def main():
-
-
-
-
-
-
+    async with Client(transport) as client:
+        tools = await client.list_tools()
+        print(f"Available tools: {{tools}}")
+        # TODO: update the dictionary of arguments passed to call_tool to make sense for your MCP.
+        result = await client.call_tool(tools[0].name, {{"a": 5, "b": 3}})
+        print(f"Result: {{result[0].text}}")
 
 if __name__ == "__main__":
-
+    asyncio.run(main())
 """
-        return _CLIENT_TEMPLATE
+        return _CLIENT_TEMPLATE
 
     if has_signature_method(OPENAI_TRANSPORT_NAME, method_signatures):
         openai_api_base = url_helper.openai_api_url()
         model_ui_url = url_helper.clarifai_url(user_id, app_id, "models", model_id)
-        _CLIENT_TEMPLATE = """
+        _CLIENT_TEMPLATE = f"""
 import os
+
 from openai import OpenAI
 
 client = OpenAI(
-    base_url="
+    base_url="{openai_api_base}",
     api_key=os.environ['CLARIFAI_PAT'],
 )
+
 response = client.chat.completions.create(
-    model="
+    model="{model_ui_url}",
     messages=[
-        {"role": "system", "content": "Talk like a pirate."},
-        {
+        {{"role": "system", "content": "Talk like a pirate."}},
+        {{
             "role": "user",
             "content": "How do I check if a Python object is an instance of a class?",
-        },
+        }},
     ],
     temperature=0.7,
-    stream=False,
+    stream=False, # stream=True also works, just iterator over the response
 )
 print(response)
 """
-        return _CLIENT_TEMPLATE
-
-        _CLIENT_TEMPLATE =
-import os
-
-from clarifai.
-
-
-"""
+        return _CLIENT_TEMPLATE
+    # Generate client template
+    _CLIENT_TEMPLATE = (
+        "import os\n\n"
+        "from clarifai.client import Model\n"
+        "from clarifai.runners.utils import data_types\n\n"
+        "{model_section}\n"
+    )
     if deployment_id and (compute_cluster_id or nodepool_id):
         raise ValueError(
             "You can only specify one of deployment_id or compute_cluster_id and nodepool_id."
@@ -108,26 +112,26 @@ from clarifai.runners.utils import data_types
         deployment_id = None
     else:
         deployment_id = (
-
+            'os.environ["CLARIFAI_DEPLOYMENT_ID"]' if not deployment_id else repr(deployment_id)
         )
 
     deployment_line = (
-        f'deployment_id
+        f'deployment_id={deployment_id}, # Only needed for dedicated deployed models'
         if deployment_id
         else ""
     )
     compute_cluster_line = (
-        f'compute_cluster_id
+        f'compute_cluster_id="{compute_cluster_id}",' if compute_cluster_id else ""
     )
     nodepool_line = (
-        f'nodepool_id
+        f'nodepool_id="{nodepool_id}", # Only needed for dedicated nodepool'
         if nodepool_id
        else ""
     )
 
     base_url_str = ""
     if base_url is not None:
-        base_url_str = f
+        base_url_str = f'base_url="{base_url}",'
 
     # Join all non-empty lines
     optional_lines = "\n ".join(
@@ -138,19 +142,17 @@ from clarifai.runners.utils import data_types
 
     if use_ctx:
         model_section = """
-model = Model.from_current_context()
+model = Model.from_current_context()
+"""
     else:
         model_ui_url = url_helper.clarifai_url(user_id, app_id, "models", model_id)
-
-
-
-
-""
+        if optional_lines:
+            model_args = f'"{model_ui_url}",\n {optional_lines}'
+        else:
+            model_args = f'"{model_ui_url}"'
+        model_section = f"model = Model(\n {model_args}\n)"
 
-
-    client_template = _CLIENT_TEMPLATE.format(
-        model_section=model_section,
-    )
+    client_template = _CLIENT_TEMPLATE.format(model_section=model_section.strip("\n"))
 
     # Generate method signatures
     method_signatures_str = []
@@ -158,8 +160,9 @@ model = Model("{model_ui_url}",
         if method_signature is None:
             continue
         method_name = method_signature.name
-        client_script_str = f
+        client_script_str = f"response = model.{method_name}("
         annotations = _get_annotations_source(method_signature)
+        param_lines = []
         for idx, (param_name, (param_type, default_value, required)) in enumerate(
             annotations.items()
         ):
@@ -172,8 +175,11 @@ model = Model("{model_ui_url}",
             if param_type == "str" and default_value is not None:
                 default_value = json.dumps(default_value)
             if default_value is not None:
-
-
+                param_lines.append(f" {param_name}={default_value},")
+        if param_lines:
+            client_script_str += "\n" + "\n".join(param_lines) + "\n)"
+        else:
+            client_script_str += ")"
         if method_signature.method_type == resources_pb2.RunnerMethodType.UNARY_UNARY:
             client_script_str += "\nprint(response)"
         elif method_signature.method_type == resources_pb2.RunnerMethodType.UNARY_STREAMING:
@@ -191,7 +197,7 @@ model = Model("{model_ui_url}",
     )
     script_lines.append("# Example usage:")
     script_lines.append(client_template)
-    script_lines.append("# Example model prediction from different model methods
+    script_lines.append("# Example model prediction from different model methods:\n")
     script_lines.append(method_signatures_str)
     script_lines.append("")
     script = "\n".join(script_lines)
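The generated client snippets are now real f-strings, so the MCP and OpenAI templates interpolate `mcp_url`, `openai_api_base`, and `model_ui_url` instead of leaving blanks. Roughly what the emitted OpenAI-compatible snippet looks like once filled in (the two URLs below are placeholders, not values produced by the tool):

```python
import os

from openai import OpenAI

client = OpenAI(
    base_url="https://api.clarifai.com/v2/ext/openai/v1",  # placeholder for {openai_api_base}
    api_key=os.environ['CLARIFAI_PAT'],
)

response = client.chat.completions.create(
    model="https://clarifai.com/<user>/<app>/models/<model>",  # placeholder for {model_ui_url}
    messages=[
        {"role": "system", "content": "Talk like a pirate."},
        {"role": "user", "content": "How do I check if a Python object is an instance of a class?"},
    ],
    temperature=0.7,
    stream=False,  # stream=True also works, just iterate over the response
)
print(response)
```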
clarifai/runners/utils/url_fetcher.py
CHANGED
@@ -1,6 +1,7 @@
 import concurrent.futures
 
 import fsspec
+import requests
 
 from clarifai.utils.logging import logger
 
@@ -12,6 +13,28 @@ def download_input(input, auth_helper=None):
             _download_input_data(input.data.parts[i].data, auth_helper=auth_helper)
 
 
+def _download_with_handling(url, mode, auth_kwargs, setter, media_type):
+    fsspec_exceptions = (
+        getattr(fsspec.exceptions, 'FSTimeoutError', Exception),
+        getattr(fsspec.exceptions, 'BlocksizeMismatchError', Exception),
+    )
+    try:
+        with fsspec.open(url, mode, **auth_kwargs) as f:
+            setter(f.read())
+    except fsspec_exceptions as e:
+        logger.error(f"FSSpec error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"FSSpec error downloading {media_type} from {url}: {e}") from e
+    except requests.RequestException as e:
+        logger.error(f"Requests error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"Requests error downloading {media_type} from {url}: {e}") from e
+    except (IOError, OSError) as e:
+        logger.error(f"IO error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"IO error downloading {media_type} from {url}: {e}") from e
+    except Exception as e:
+        logger.error(f"Unexpected error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"Unexpected error downloading {media_type} from {url}: {e}") from e
+
+
 def _download_input_data(input_data, auth_helper=None):
     """
     This function will download any urls that are not already bytes.
@@ -26,21 +49,37 @@ def _download_input_data(input_data, auth_helper=None):
     auth_kwargs = _get_auth_kwargs(auth_helper)
 
     if input_data.image.url and not input_data.image.base64:
-
-
-
+        _download_with_handling(
+            input_data.image.url,
+            'rb',
+            auth_kwargs,
+            lambda val: setattr(input_data.image, 'base64', val),
+            'image',
+        )
     if input_data.video.url and not input_data.video.base64:
-
-
-
+        _download_with_handling(
+            input_data.video.url,
+            'rb',
+            auth_kwargs,
+            lambda val: setattr(input_data.video, 'base64', val),
+            'video',
+        )
     if input_data.audio.url and not input_data.audio.base64:
-
-
-
+        _download_with_handling(
+            input_data.audio.url,
+            'rb',
+            auth_kwargs,
+            lambda val: setattr(input_data.audio, 'base64', val),
+            'audio',
+        )
     if input_data.text.url and not input_data.text.raw:
-
-
-
+        _download_with_handling(
+            input_data.text.url,
+            'r',
+            auth_kwargs,
+            lambda val: setattr(input_data.text, 'raw', val),
+            'text',
+        )
 
 
 def _get_auth_kwargs(auth_helper):
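The new `_download_with_handling` wrapper converts fsspec, requests, and IO failures into a `RuntimeError` that names the media type and URL, instead of letting the underlying exception escape. A hedged sketch of observing this through the module-level `download_input` helper shown above (the input construction and URL are illustrative):

```python
from clarifai_grpc.grpc.api import resources_pb2

from clarifai.runners.utils.url_fetcher import download_input

inp = resources_pb2.Input(
    data=resources_pb2.Data(image=resources_pb2.Image(url="https://example.com/missing.jpg"))
)

try:
    download_input(inp)  # fetches the URL and fills input.data.image.base64
except RuntimeError as e:
    # e.g. "Requests error downloading image from https://example.com/missing.jpg: ..."
    print(f"Download failed: {e}")
```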
|