clarifai 11.6.4rc2__py3-none-any.whl → 11.6.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/cli/README.md +39 -0
- clarifai/cli/base.py +107 -118
- clarifai/cli/model.py +62 -79
- clarifai/client/app.py +1 -1
- clarifai/client/auth/helper.py +7 -2
- clarifai/client/base.py +35 -8
- clarifai/client/compute_cluster.py +1 -1
- clarifai/client/model.py +3 -1
- clarifai/client/nodepool.py +4 -2
- clarifai/client/user.py +4 -2
- clarifai/runners/models/model_runner.py +55 -0
- clarifai/runners/server.py +1 -1
- clarifai/runners/utils/url_fetcher.py +51 -12
- clarifai/utils/cli.py +127 -0
- clarifai/utils/constants.py +5 -0
- clarifai/utils/misc.py +47 -0
- {clarifai-11.6.4rc2.dist-info → clarifai-11.6.5.dist-info}/METADATA +2 -2
- clarifai-11.6.5.dist-info/RECORD +127 -0
- {clarifai-11.6.4rc2.dist-info → clarifai-11.6.5.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-311.pyc +0 -0
- clarifai/__pycache__/errors.cpython-39.pyc +0 -0
- clarifai/__pycache__/versions.cpython-311.pyc +0 -0
- clarifai/__pycache__/versions.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-39.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-39.pyc +0 -0
- clarifai/cli/model_templates.py +0 -243
- clarifai/cli/pipeline_step_templates.py +0 -64
- clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/model_client.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/model_client.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/runner.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-39.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-39.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-39.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-39.pyc +0 -0
- clarifai/models/model_serving/README.md +0 -158
- clarifai/models/model_serving/__init__.py +0 -14
- clarifai/models/model_serving/cli/__init__.py +0 -12
- clarifai/models/model_serving/cli/_utils.py +0 -53
- clarifai/models/model_serving/cli/base.py +0 -14
- clarifai/models/model_serving/cli/build.py +0 -79
- clarifai/models/model_serving/cli/clarifai_clis.py +0 -33
- clarifai/models/model_serving/cli/create.py +0 -171
- clarifai/models/model_serving/cli/example_cli.py +0 -34
- clarifai/models/model_serving/cli/login.py +0 -26
- clarifai/models/model_serving/cli/upload.py +0 -179
- clarifai/models/model_serving/constants.py +0 -21
- clarifai/models/model_serving/docs/cli.md +0 -161
- clarifai/models/model_serving/docs/concepts.md +0 -229
- clarifai/models/model_serving/docs/dependencies.md +0 -11
- clarifai/models/model_serving/docs/inference_parameters.md +0 -139
- clarifai/models/model_serving/docs/model_types.md +0 -19
- clarifai/models/model_serving/model_config/__init__.py +0 -16
- clarifai/models/model_serving/model_config/base.py +0 -369
- clarifai/models/model_serving/model_config/config.py +0 -312
- clarifai/models/model_serving/model_config/inference_parameter.py +0 -129
- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -25
- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -20
- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -22
- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -32
- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -19
- clarifai/models/model_serving/model_config/output.py +0 -133
- clarifai/models/model_serving/model_config/triton/__init__.py +0 -14
- clarifai/models/model_serving/model_config/triton/serializer.py +0 -136
- clarifai/models/model_serving/model_config/triton/triton_config.py +0 -182
- clarifai/models/model_serving/model_config/triton/wrappers.py +0 -281
- clarifai/models/model_serving/repo_build/__init__.py +0 -14
- clarifai/models/model_serving/repo_build/build.py +0 -198
- clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -2
- clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -169
- clarifai/models/model_serving/repo_build/static_files/inference.py +0 -26
- clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -25
- clarifai/models/model_serving/repo_build/static_files/test.py +0 -40
- clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -75
- clarifai/models/model_serving/utils.py +0 -23
- clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/mcp_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
- clarifai/runners/models/__pycache__/openai_class.cpython-311.pyc +0 -0
- clarifai/runners/models/base_typed_model.py +0 -238
- clarifai/runners/models/model_upload.py +0 -607
- clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/code_script.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/code_script.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/method_signatures.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/model_utils.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/openai_convertor.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/serializers.cpython-311.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
- clarifai/runners/utils/data_handler.py +0 -231
- clarifai/runners/utils/data_types/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/runners/utils/data_types/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_types/__pycache__/data_types.cpython-311.pyc +0 -0
- clarifai/runners/utils/data_types/__pycache__/data_types.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_types.py +0 -471
- clarifai/runners/utils/temp.py +0 -59
- clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
- clarifai/utils/__pycache__/protobuf.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
- clarifai-11.6.4rc2.dist-info/RECORD +0 -301
- {clarifai-11.6.4rc2.dist-info → clarifai-11.6.5.dist-info}/entry_points.txt +0 -0
- {clarifai-11.6.4rc2.dist-info → clarifai-11.6.5.dist-info}/licenses/LICENSE +0 -0
- {clarifai-11.6.4rc2.dist-info → clarifai-11.6.5.dist-info}/top_level.txt +0 -0
clarifai/client/auth/helper.py
CHANGED
```diff
@@ -2,6 +2,7 @@ import os
 import urllib.request
 from typing import Any, Dict
 
+import grpc
 from clarifai_grpc.channel.clarifai_channel import ClarifaiChannel
 from clarifai_grpc.grpc.api import resources_pb2, service_pb2_grpc
 
@@ -289,7 +290,11 @@ class ClarifaiAuthHelper:
         )
 
     def get_stub(self) -> service_pb2_grpc.V2Stub:
-
+        stub, channel = self.get_stub_and_channel()
+        return stub
+
+    def get_stub_and_channel(self) -> tuple[service_pb2_grpc.V2Stub, grpc.Channel]:
+        """Get the API gRPC stub and channel based on the API endpoint base.
 
         Returns:
           stub: The service_pb2_grpc.V2Stub stub for the API.
@@ -310,7 +315,7 @@ class ClarifaiAuthHelper:
             port = 80
             channel = ClarifaiChannel.get_insecure_grpc_channel(base=host, port=port)
         stub = service_pb2_grpc.V2Stub(channel)
-        return stub
+        return stub, channel
 
     def get_async_stub(self) -> service_pb2_grpc.V2Stub:
         """Get the API gRPC async stub using the right channel based on the API endpoint base.
```
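The change above splits channel creation out of `get_stub()`: the new `get_stub_and_channel()` returns both the stub and the underlying `grpc.Channel`, so callers can close the connection explicitly. A minimal usage sketch, with placeholder credentials (the exact `ClarifaiAuthHelper` constructor arguments shown are illustrative):

```python
# Sketch only: placeholder credentials, no real RPC is issued.
from clarifai.client.auth.helper import ClarifaiAuthHelper

helper = ClarifaiAuthHelper(user_id="my-user", app_id="my-app", pat="MY_PAT", validate=False)

stub, channel = helper.get_stub_and_channel()  # new in this release
try:
    # ... issue gRPC calls through `stub` ...
    pass
finally:
    channel.close()  # the channel handle was not exposed by get_stub() before

stub_only = helper.get_stub()  # still works; now delegates to get_stub_and_channel()
```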
clarifai/client/base.py
CHANGED
```diff
@@ -11,7 +11,7 @@ from clarifai.client.auth.helper import ClarifaiAuthHelper
 from clarifai.constants.base import COMPUTE_ORCHESTRATION_RESOURCES
 from clarifai.errors import ApiError, UserError
 from clarifai.utils.constants import CLARIFAI_PAT_ENV_VAR, CLARIFAI_SESSION_TOKEN_ENV_VAR
-from clarifai.utils.misc import
+from clarifai.utils.misc import get_from_dict_env_or_config
 
 
 class BaseClient:
@@ -39,15 +39,42 @@ class BaseClient:
     def __init__(self, **kwargs):
         token, pat = "", ""
         try:
-            pat =
+            pat = get_from_dict_env_or_config(key="pat", env_key=CLARIFAI_PAT_ENV_VAR, **kwargs)
         except UserError:
-
-
-
+            try:
+                token = get_from_dict_env_or_config(
+                    key="token", env_key=CLARIFAI_SESSION_TOKEN_ENV_VAR, **kwargs
+                )
+            except UserError:
+                pass
         finally:
-
-
-
+            if not (token or pat):
+                raise UserError(
+                    "Authentication Required. Please authenticate in one of the following ways:\n\n"
+                    "- Pass your Personal Access Token ('pat') or session token ('token') as arguments to your function.\n"
+                    "- Set the CLARIFAI_PAT or CLARIFAI_SESSION_TOKEN environment variables in your environment.\n"
+                    "- Run `clarifai login` in your terminal to configure CLI authentication."
+                )
+
+        # Also try to get user_id and base from CLI config if not provided
+        if not kwargs.get('user_id'):
+            try:
+                user_id = get_from_dict_env_or_config(
+                    key="user_id", env_key="CLARIFAI_USER_ID", **kwargs
+                )
+                kwargs['user_id'] = user_id
+            except UserError:
+                pass  # user_id is optional for some use cases
+
+        if not kwargs.get('base'):
+            try:
+                base = get_from_dict_env_or_config(
+                    key="base", env_key="CLARIFAI_API_BASE", **kwargs
+                )
+                kwargs['base'] = base
+            except UserError:
+                pass  # base has a default value
+
         kwargs.update({'token': token, 'pat': pat})
 
         self.auth_helper = ClarifaiAuthHelper(**kwargs, validate=False)
```
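With this change, `BaseClient.__init__` resolves credentials in three steps: explicit keyword arguments, then environment variables, then the context saved by `clarifai login`; `user_id` and `base` get the same fallback treatment. A hedged illustration of that resolution order (values are placeholders):

```python
# Illustration of the fallback order the new BaseClient.__init__ follows; values are placeholders.
import os
from clarifai.client.user import User

# 1) Explicit kwargs win:
user = User(user_id="my-user", pat="MY_PAT")

# 2) Otherwise environment variables are consulted:
os.environ["CLARIFAI_PAT"] = "MY_PAT"
user = User(user_id="my-user")

# 3) Otherwise the active `clarifai login` context is read; if no PAT or session
#    token resolves anywhere, a UserError lists these three options.
```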
clarifai/client/compute_cluster.py
CHANGED
```diff
@@ -165,7 +165,7 @@ class ComputeCluster(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostNodepools, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info("
+        self.logger.info(f"Nodepool with ID '{nodepool_id}' is created:\n{response.status}")
 
         dict_response = MessageToDict(response.nodepools[0], preserving_proto_field_name=True)
         kwargs = self.process_response_keys(dict_response, 'nodepool')
```
clarifai/client/model.py
CHANGED
```diff
@@ -445,7 +445,9 @@ class Model(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostModelVersions, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info(
+        self.logger.info(
+            f"Model Version with ID '{response.model.model_version.id}' is created:\n{response.status}"
+        )
 
         kwargs.update({'app_id': self.app_id, 'user_id': self.user_id})
         dict_response = MessageToDict(response, preserving_proto_field_name=True)
```
clarifai/client/nodepool.py
CHANGED
```diff
@@ -195,7 +195,9 @@ class Nodepool(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostDeployments, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info(
+        self.logger.info(
+            f"Deployment with ID '{response.deployments[0].id}' is created:\n{response.status}"
+        )
 
         dict_response = MessageToDict(
             response.deployments[0], preserving_proto_field_name=True, use_integers_for_enums=True
@@ -324,7 +326,7 @@ class Nodepool(Lister, BaseClient):
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
         self.logger.info(
-            "
+            f"Runner with ID '{response.runners[0].id}' is created:\n{response.status}"
         )
 
         dict_response = MessageToDict(response.runners[0], preserving_proto_field_name=True)
```
clarifai/client/user.py
CHANGED
```diff
@@ -177,7 +177,7 @@ class User(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostApps, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info("
+        self.logger.info(f"App with ID '{app_id}' is created:\n{response.status}")
         return App.from_auth_helper(auth=self.auth_helper, app_id=app_id)
 
     def _process_compute_cluster_config(
@@ -255,7 +255,9 @@ class User(Lister, BaseClient):
         response = self._grpc_request(self.STUB.PostComputeClusters, request)
         if response.status.code != status_code_pb2.SUCCESS:
             raise Exception(response.status)
-        self.logger.info(
+        self.logger.info(
+            f"Compute Cluster with ID '{compute_cluster_id}' is created:\n{response.status}"
+        )
         return ComputeCluster.from_auth_helper(
             auth=self.auth_helper, compute_cluster_id=compute_cluster_id
         )
```
clarifai/runners/models/model_runner.py
CHANGED
```diff
@@ -1,3 +1,4 @@
+import time
 from typing import Iterator
 
 from clarifai_grpc.grpc.api import service_pb2
@@ -6,6 +7,8 @@ from clarifai_protocol import BaseRunner
 from clarifai_protocol.utils.health import HealthProbeRequestHandler
 
 from clarifai.client.auth.helper import ClarifaiAuthHelper
+from clarifai.utils.constants import STATUS_FAIL, STATUS_MIXED, STATUS_OK, STATUS_UNKNOWN
+from clarifai.utils.logging import get_req_id_from_context, logger
 
 from ..utils.url_fetcher import ensure_urls_downloaded
 from .model_class import ModelClass
@@ -106,6 +109,20 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
             raise Exception("Unexpected work item type: {}".format(runner_item))
         request = runner_item.post_model_outputs_request
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
+        start_time = time.time()
+        req_id = get_req_id_from_context()
+        status_str = STATUS_UNKNOWN
+        # Endpoint is always POST /v2/.../outputs for this runner
+        endpoint = "POST /v2/.../outputs "
+
+        # if method_name == '_GET_SIGNATURES' then the request is for getting signatures and we don't want to log it.
+        # This is a workaround to avoid logging the _GET_SIGNATURES method call.
+        method_name = None
+        logging = True
+        if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
+            method_name = request.inputs[0].data.metadata['_method_name']
+        if method_name == '_GET_SIGNATURES':
+            logging = False
 
         resp = self.model.predict_wrapper(request)
         # if we have any non-successful code already it's an error we can return.
@@ -113,6 +130,9 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
             resp.status.code != status_code_pb2.SUCCESS
             and resp.status.code != status_code_pb2.ZERO
         ):
+            status_str = f"{resp.status.code} ERROR"
+            duration_ms = (time.time() - start_time) * 1000
+            logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
             return service_pb2.RunnerItemOutput(multi_output_response=resp)
         successes = []
         for output in resp.outputs:
@@ -126,18 +146,24 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
                 code=status_code_pb2.SUCCESS,
                 description="Success",
             )
+            status_str = STATUS_OK
         elif any(successes):
             status = status_pb2.Status(
                 code=status_code_pb2.MIXED_STATUS,
                 description="Mixed Status",
             )
+            status_str = STATUS_MIXED
         else:
             status = status_pb2.Status(
                 code=status_code_pb2.FAILURE,
                 description="Failed",
            )
+            status_str = STATUS_FAIL
 
         resp.status.CopyFrom(status)
+        if logging:
+            duration_ms = (time.time() - start_time) * 1000
+            logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
         return service_pb2.RunnerItemOutput(multi_output_response=resp)
 
     def runner_item_generate(
@@ -150,12 +176,21 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
         request = runner_item.post_model_outputs_request
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
 
+        # --- Live logging additions ---
+        start_time = time.time()
+        req_id = get_req_id_from_context()
+        status_str = STATUS_UNKNOWN
+        endpoint = "POST /v2/.../outputs/generate"
+
         for resp in self.model.generate_wrapper(request):
             # if we have any non-successful code already it's an error we can return.
             if (
                 resp.status.code != status_code_pb2.SUCCESS
                 and resp.status.code != status_code_pb2.ZERO
             ):
+                status_str = f"{resp.status.code} ERROR"
+                duration_ms = (time.time() - start_time) * 1000
+                logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
                 yield service_pb2.RunnerItemOutput(multi_output_response=resp)
                 continue
             successes = []
@@ -170,30 +205,44 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
                     code=status_code_pb2.SUCCESS,
                     description="Success",
                 )
+                status_str = STATUS_OK
             elif any(successes):
                 status = status_pb2.Status(
                     code=status_code_pb2.MIXED_STATUS,
                     description="Mixed Status",
                 )
+                status_str = STATUS_MIXED
             else:
                 status = status_pb2.Status(
                     code=status_code_pb2.FAILURE,
                     description="Failed",
                 )
+                status_str = STATUS_FAIL
             resp.status.CopyFrom(status)
 
             yield service_pb2.RunnerItemOutput(multi_output_response=resp)
 
+        duration_ms = (time.time() - start_time) * 1000
+        logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
+
     def runner_item_stream(
         self, runner_item_iterator: Iterator[service_pb2.RunnerItem]
     ) -> Iterator[service_pb2.RunnerItemOutput]:
         # Call the generate() method the underlying model implements.
+        start_time = time.time()
+        req_id = get_req_id_from_context()
+        status_str = STATUS_UNKNOWN
+        endpoint = "POST /v2/.../outputs/stream "
+
         for resp in self.model.stream_wrapper(pmo_iterator(runner_item_iterator)):
             # if we have any non-successful code already it's an error we can return.
             if (
                 resp.status.code != status_code_pb2.SUCCESS
                 and resp.status.code != status_code_pb2.ZERO
             ):
+                status_str = f"{resp.status.code} ERROR"
+                duration_ms = (time.time() - start_time) * 1000
+                logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
                 yield service_pb2.RunnerItemOutput(multi_output_response=resp)
                 continue
             successes = []
@@ -208,20 +257,26 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
                     code=status_code_pb2.SUCCESS,
                     description="Success",
                 )
+                status_str = STATUS_OK
             elif any(successes):
                 status = status_pb2.Status(
                     code=status_code_pb2.MIXED_STATUS,
                     description="Mixed Status",
                 )
+                status_str = STATUS_MIXED
             else:
                 status = status_pb2.Status(
                     code=status_code_pb2.FAILURE,
                     description="Failed",
                 )
+                status_str = STATUS_FAIL
             resp.status.CopyFrom(status)
 
             yield service_pb2.RunnerItemOutput(multi_output_response=resp)
 
+        duration_ms = (time.time() - start_time) * 1000
+        logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
+
 
 def pmo_iterator(runner_item_iterator, auth_helper=None):
     for runner_item in runner_item_iterator:
```
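The runner now emits one access-log style line per request, `endpoint | status | duration | req_id`, using the `STATUS_*` strings added to `clarifai.utils.constants`, and skips logging for internal `_GET_SIGNATURES` calls. A standalone sketch of that timing-and-logging pattern (not the actual `ModelRunner` code):

```python
# Standalone sketch of the log-line format added above; not the real ModelRunner.
import time
import logging

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger("runner-demo")

STATUS_OK, STATUS_FAIL = "200 OK", "500 FAIL"  # mirrors clarifai.utils.constants

def handle(endpoint: str, req_id: str, work) -> None:
    start_time = time.time()
    try:
        work()
        status_str = STATUS_OK
    except Exception:
        status_str = STATUS_FAIL
    duration_ms = (time.time() - start_time) * 1000
    # e.g. "POST /v2/.../outputs  | 200 OK | 10.42ms | req_id=abc123"
    logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")

handle("POST /v2/.../outputs ", "abc123", lambda: time.sleep(0.01))
```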
clarifai/runners/server.py
CHANGED
```diff
@@ -133,7 +133,7 @@ def serve(
         pat=pat,
         num_parallel_polls=num_threads,
     )
-    logger.info("Runner started successfully and is waiting for work from the API
+    logger.info("Runner started successfully and is waiting for work from the API...\n")
     runner.start()  # start the runner to fetch work from the API.
 
 
```
clarifai/runners/utils/url_fetcher.py
CHANGED
```diff
@@ -1,6 +1,7 @@
 import concurrent.futures
 
 import fsspec
+import requests
 
 from clarifai.utils.logging import logger
 
@@ -12,6 +13,28 @@ def download_input(input, auth_helper=None):
             _download_input_data(input.data.parts[i].data, auth_helper=auth_helper)
 
 
+def _download_with_handling(url, mode, auth_kwargs, setter, media_type):
+    fsspec_exceptions = (
+        getattr(fsspec.exceptions, 'FSTimeoutError', Exception),
+        getattr(fsspec.exceptions, 'BlocksizeMismatchError', Exception),
+    )
+    try:
+        with fsspec.open(url, mode, **auth_kwargs) as f:
+            setter(f.read())
+    except fsspec_exceptions as e:
+        logger.error(f"FSSpec error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"FSSpec error downloading {media_type} from {url}: {e}") from e
+    except requests.RequestException as e:
+        logger.error(f"Requests error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"Requests error downloading {media_type} from {url}: {e}") from e
+    except (IOError, OSError) as e:
+        logger.error(f"IO error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"IO error downloading {media_type} from {url}: {e}") from e
+    except Exception as e:
+        logger.error(f"Unexpected error downloading {media_type} from {url}: {e}")
+        raise RuntimeError(f"Unexpected error downloading {media_type} from {url}: {e}") from e
+
+
 def _download_input_data(input_data, auth_helper=None):
     """
     This function will download any urls that are not already bytes.
@@ -26,21 +49,37 @@ def _download_input_data(input_data, auth_helper=None):
     auth_kwargs = _get_auth_kwargs(auth_helper)
 
     if input_data.image.url and not input_data.image.base64:
-
-
-
+        _download_with_handling(
+            input_data.image.url,
+            'rb',
+            auth_kwargs,
+            lambda val: setattr(input_data.image, 'base64', val),
+            'image',
+        )
     if input_data.video.url and not input_data.video.base64:
-
-
-
+        _download_with_handling(
+            input_data.video.url,
+            'rb',
+            auth_kwargs,
+            lambda val: setattr(input_data.video, 'base64', val),
+            'video',
+        )
    if input_data.audio.url and not input_data.audio.base64:
-
-
-
+        _download_with_handling(
+            input_data.audio.url,
+            'rb',
+            auth_kwargs,
+            lambda val: setattr(input_data.audio, 'base64', val),
+            'audio',
+        )
     if input_data.text.url and not input_data.text.raw:
-
-
-
+        _download_with_handling(
+            input_data.text.url,
+            'r',
+            auth_kwargs,
+            lambda val: setattr(input_data.text, 'raw', val),
+            'text',
+        )
 
 
 def _get_auth_kwargs(auth_helper):
```
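`_download_with_handling` centralizes the fsspec download: it reads the URL, hands the bytes to a setter callback that writes them onto the proto field, and re-raises any failure as a `RuntimeError` tagged with the media type and URL. A simplified, self-contained sketch of that callback pattern (the `_Image` class below is a stand-in object, not a Clarifai type):

```python
# Simplified sketch of the download-plus-setter pattern; _Image is a stand-in object.
import os
import tempfile

import fsspec

class _Image:
    base64 = b""  # mimics the proto field the real code fills in

def download(url: str, setter) -> None:
    try:
        with fsspec.open(url, "rb") as f:  # fsspec also handles http(s)://, s3://, ... given the right extras
            setter(f.read())
    except Exception as e:
        # the real helper logs and re-raises with the media type and URL for context
        raise RuntimeError(f"error downloading {url}: {e}") from e

# demo with a local file so the sketch runs without network access
tmp = tempfile.NamedTemporaryFile(delete=False, suffix=".jpg")
tmp.write(b"\xff\xd8\xff\xe0 fake jpeg bytes")
tmp.close()

img = _Image()
download(tmp.name, lambda val: setattr(img, "base64", val))
print(f"downloaded {len(img.base64)} bytes")
os.unlink(tmp.name)
```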
clarifai/utils/cli.py
CHANGED
```diff
@@ -220,3 +220,130 @@ def validate_context_auth(pat: str, user_id: str, api_base: str = None):
         logger.error(f"❌ Validation failed: \n{error_msg}")
         logger.error("Please check your credentials and try again.")
         raise click.Abort()  # Exit without saving the configuration
+
+
+def customize_ollama_model(model_path, model_name, port, context_length):
+    """Customize the Ollama model name in the cloned template files.
+    Args:
+        model_path: Path to the cloned model directory
+        model_name: The model name to set (e.g., 'llama3.1', 'mistral')
+
+    """
+    model_py_path = os.path.join(model_path, "1", "model.py")
+
+    if not os.path.exists(model_py_path):
+        logger.warning(f"Model file {model_py_path} not found, skipping model name customization")
+        return
+
+    try:
+        # Read the model.py file
+        with open(model_py_path, 'r') as file:
+            content = file.read()
+        if model_name:
+            # Replace the default model name in the load_model method
+            content = content.replace(
+                'self.model = os.environ.get("OLLAMA_MODEL_NAME", \'llama3.2\')',
+                f'self.model = os.environ.get("OLLAMA_MODEL_NAME", \'{model_name}\')',
+            )
+
+        if port:
+            # Replace the default port variable in the model.py file
+            content = content.replace("PORT = '23333'", f"PORT = '{port}'")
+
+        if context_length:
+            # Replace the default context length variable in the model.py file
+            content = content.replace(
+                "context_length = '8192'", f"context_length = '{context_length}'"
+            )
+
+        # Write the modified content back to model.py
+        with open(model_py_path, 'w') as file:
+            file.write(content)
+
+    except Exception as e:
+        logger.error(f"Failed to customize Ollama model name in {model_py_path}: {e}")
+        raise
+
+
+def check_ollama_installed():
+    """Check if the Ollama CLI is installed."""
+    try:
+        import subprocess
+
+        result = subprocess.run(
+            ['ollama', '--version'], capture_output=True, text=True, check=False
+        )
+        if result.returncode == 0:
+            return True
+        else:
+            return False
+    except FileNotFoundError:
+        return False
+
+
+def _is_package_installed(package_name):
+    """Helper function to check if a single package in requirements.txt is installed."""
+    import importlib.metadata
+
+    try:
+        importlib.metadata.distribution(package_name)
+        logger.debug(f"✅ {package_name} - installed")
+        return True
+    except importlib.metadata.PackageNotFoundError:
+        logger.debug(f"❌ {package_name} - not installed")
+        return False
+    except Exception as e:
+        logger.warning(f"Error checking {package_name}: {e}")
+        return False
+
+
+def check_requirements_installed(model_path):
+    """Check if all dependencies in requirements.txt are installed."""
+    import re
+    from pathlib import Path
+
+    requirements_path = Path(model_path) / "requirements.txt"
+
+    if not requirements_path.exists():
+        logger.warning(f"requirements.txt not found at {requirements_path}")
+        return True
+
+    try:
+        package_pattern = re.compile(r'^([a-zA-Z0-9_-]+)')
+
+        # Getting package name and version (for logging)
+        requirements = [
+            (match.group(1), pack)
+            for line in requirements_path.read_text().splitlines()
+            if (pack := line.strip())
+            and not line.startswith('#')
+            and (match := package_pattern.match(line))
+        ]
+
+        if not requirements:
+            logger.info("No dependencies found in requirements.txt")
+            return True
+
+        logger.info(f"Checking {len(requirements)} dependencies...")
+
+        missing = [
+            full_req
+            for package_name, full_req in requirements
+            if not _is_package_installed(package_name)
+        ]
+
+        if not missing:
+            logger.info(f"✅ All {len(requirements)} dependencies are installed!")
+            return True
+
+        # Report missing packages
+        logger.error(
+            f"❌ {len(missing)} of {len(requirements)} required packages are missing in the current environment"
+        )
+        logger.error("\n".join(f" - {pkg}" for pkg in missing))
+        logger.warning(f"To install: pip install -r {requirements_path}")
+        return False
+
+    except Exception as e:
+        logger.error(f"Failed to check requirements: {e}")
+        return False
```
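These helpers support the Ollama local-runner flow: `check_ollama_installed` shells out to `ollama --version`, `check_requirements_installed` parses `requirements.txt` and verifies each package via `importlib.metadata`, and `customize_ollama_model` rewrites the defaults in the cloned template's `1/model.py`. A hedged usage sketch (the model path and the argument values are placeholders, not a documented workflow):

```python
# Usage sketch; "./my-ollama-model" and the argument values are placeholders.
from clarifai.utils.cli import (
    check_ollama_installed,
    check_requirements_installed,
    customize_ollama_model,
)

model_path = "./my-ollama-model"  # a cloned Ollama model template directory

if not check_ollama_installed():
    raise SystemExit("Ollama CLI not found; install it before starting a local runner.")

if not check_requirements_installed(model_path):
    raise SystemExit(f"Missing packages; run: pip install -r {model_path}/requirements.txt")

# Swap the template defaults for a specific model, port, and context window.
customize_ollama_model(model_path, model_name="llama3.1", port="11434", context_length="32768")
```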
clarifai/utils/constants.py
CHANGED
```diff
@@ -64,3 +64,8 @@ DEFAULT_LOCAL_RUNNER_NODEPOOL_CONFIG = {
 }
 DEFAULT_OLLAMA_MODEL_REPO = "https://github.com/Clarifai/runners-examples"
 DEFAULT_OLLAMA_MODEL_REPO_BRANCH = "ollama"
+
+STATUS_OK = "200 OK"
+STATUS_MIXED = "207 MIXED"
+STATUS_FAIL = "500 FAIL"
+STATUS_UNKNOWN = "UNKNOWN"
```
clarifai/utils/misc.py
CHANGED
```diff
@@ -78,6 +78,53 @@ def get_from_env(key: str, env_key: str) -> str:
     )
 
 
+def get_from_dict_env_or_config(key: str, env_key: str, **data) -> str:
+    """Get a value from a dictionary, environment variable, or CLI config context."""
+    # First try the provided data/kwargs
+    if key in data and data[key]:
+        return data[key]
+
+    # Then try environment variables
+    if env_key in os.environ and os.environ[env_key]:
+        return os.environ[env_key]
+
+    # Finally try CLI config context as fallback
+    try:
+        from clarifai.utils.config import Config
+        from clarifai.utils.constants import DEFAULT_CONFIG
+
+        config = Config.from_yaml(filename=DEFAULT_CONFIG)
+        current_context = config.current
+
+        # Convert env_key to the attribute name expected by Context
+        # e.g., CLARIFAI_PAT -> pat, CLARIFAI_USER_ID -> user_id, CLARIFAI_API_BASE -> api_base
+        if env_key == "CLARIFAI_PAT":
+            attr_name = "pat"
+        elif env_key == "CLARIFAI_USER_ID":
+            attr_name = "user_id"
+        elif env_key == "CLARIFAI_API_BASE":
+            attr_name = "api_base"
+        else:
+            # For other cases, convert CLARIFAI_SOMETHING to something
+            attr_name = env_key.replace("CLARIFAI_", "").lower()
+
+        if hasattr(current_context, attr_name):
+            value = getattr(current_context, attr_name)
+            if value:
+                return value
+    except Exception:
+        # If CLI config loading fails, fall through to raise error
+        pass
+
+    # If all methods fail, raise an error suggesting clarifai login
+    raise UserError(
+        f"Configuration Required. Could not find '{key}'. Please provide it in one of the following ways:\n\n"
+        f"- Pass '{key}' as a named parameter to your function.\n"
+        f"- Set the {env_key} environment variable in your environment.\n"
+        f"- Run `clarifai login` in your terminal to configure CLI authentication."
+    )
+
+
 def concept_relations_accumulation(
     relations_dict: Dict[str, Any], subject_concept: str, object_concept: str, predicate: str
 ) -> Dict[str, Any]:
```
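`get_from_dict_env_or_config` is the fallback helper the client changes above rely on: kwargs first, then the named environment variable, then the matching attribute of the current `clarifai login` context, with a `UserError` if nothing resolves. A short usage sketch (values are placeholders):

```python
# Usage sketch of the new fallback helper; values are placeholders.
import os
from clarifai.utils.misc import get_from_dict_env_or_config

# Resolved from the passed kwargs:
pat = get_from_dict_env_or_config(key="pat", env_key="CLARIFAI_PAT", pat="MY_PAT")

# Resolved from the environment when no kwarg is supplied:
os.environ["CLARIFAI_API_BASE"] = "https://api.clarifai.com"
base = get_from_dict_env_or_config(key="base", env_key="CLARIFAI_API_BASE")

# With neither set, the current `clarifai login` context is consulted
# (CLARIFAI_PAT -> pat, CLARIFAI_API_BASE -> api_base, ...); otherwise UserError is raised.
```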
{clarifai-11.6.4rc2.dist-info → clarifai-11.6.5.dist-info}/METADATA
CHANGED
```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: clarifai
-Version: 11.6.4rc2
+Version: 11.6.5
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
 Author-email: support@clarifai.com
@@ -19,7 +19,7 @@ Classifier: Operating System :: OS Independent
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: clarifai-grpc>=11.6.
+Requires-Dist: clarifai-grpc>=11.6.4
 Requires-Dist: clarifai-protocol>=0.0.25
 Requires-Dist: numpy>=1.22.0
 Requires-Dist: tqdm>=4.65.0
```