clarifai-11.2.3rc1-py3-none-any.whl → clarifai-11.2.3rc3-py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry; it is provided for informational purposes only.
- clarifai/__init__.py +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/cli/base.py +81 -228
- clarifai/cli/compute_cluster.py +17 -25
- clarifai/cli/deployment.py +41 -67
- clarifai/cli/model.py +39 -26
- clarifai/cli/nodepool.py +40 -59
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/app.py +1 -1
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/client/cli/__init__.py +0 -0
- clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/base_cli.py +88 -0
- clarifai/client/cli/model_cli.py +29 -0
- clarifai/client/model.py +159 -393
- clarifai/client/model_client.py +502 -0
- clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/{model.cpython-312.pyc → model.cpython-310.pyc} +0 -0
- clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/runners/__init__.py +2 -7
- clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template +42 -0
- clarifai/runners/dockerfile_template/Dockerfile.nim +71 -0
- clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
- clarifai/runners/models/model_builder.py +138 -51
- clarifai/runners/models/model_class.py +441 -28
- clarifai/runners/models/model_class_refract.py +80 -0
- clarifai/runners/models/model_run_locally.py +25 -89
- clarifai/runners/models/model_runner.py +8 -0
- clarifai/runners/models/model_servicer.py +11 -2
- clarifai/runners/models/model_upload.py +607 -0
- clarifai/runners/models/temp.py +25 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
- clarifai/runners/utils/code_script.py +217 -0
- clarifai/runners/utils/const.py +8 -9
- clarifai/runners/utils/data_handler.py +271 -210
- clarifai/runners/utils/data_handler_refract.py +213 -0
- clarifai/runners/utils/data_types.py +473 -0
- clarifai/runners/utils/data_utils.py +165 -0
- clarifai/runners/utils/loader.py +6 -36
- clarifai/runners/utils/logger.py +0 -0
- clarifai/runners/utils/method_signatures.py +518 -0
- clarifai/runners/utils/serializers.py +222 -0
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/utils/cli.py +33 -132
- clarifai/utils/constants.py +0 -4
- clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
- clarifai/utils/misc.py +0 -2
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc3.dist-info}/METADATA +14 -3
- clarifai-11.2.3rc3.dist-info/RECORD +238 -0
- {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc3.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/__pycache__/errors.cpython-312.pyc +0 -0
- clarifai/__pycache__/versions.cpython-312.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-312.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-312.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-312.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-312.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/model_client.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-312.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-312.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-312.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-312.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-312.pyc +0 -0
- clarifai/constants/__pycache__/base.cpython-312.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-312.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-312.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-312.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-312.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-312.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-312.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-312.pyc +0 -0
- clarifai/modules/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/modules/__pycache__/css.cpython-312.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/runners/__pycache__/server.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-312.pyc +0 -0
- clarifai/runners/models/__pycache__/model_servicer.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_types.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/method_signatures.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/serializers.cpython-312.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-312.pyc +0 -0
- clarifai/schema/__pycache__/search.cpython-312.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/config.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-312.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-312.pyc +0 -0
- clarifai/utils/config.py +0 -105
- clarifai/utils/config.py~ +0 -145
- clarifai/utils/evaluation/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/helpers.cpython-312.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-312.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-312.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-312.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-312.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-312.pyc +0 -0
- clarifai-11.2.3rc1.dist-info/RECORD +0 -185
- {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc3.dist-info}/LICENSE +0 -0
- {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc3.dist-info}/entry_points.txt +0 -0
- {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc3.dist-info}/top_level.txt +0 -0
clarifai/runners/models/model_class.py
@@ -1,41 +1,454 @@
-
-
+import inspect
+import itertools
+import json
+import logging
+import os
+import traceback
+from abc import ABC
+from typing import Any, Dict, Iterator, List
 
-from clarifai_grpc.grpc.api import service_pb2
+from clarifai_grpc.grpc.api import resources_pb2, service_pb2
+from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
+from google.protobuf import json_format
+
+from clarifai.runners.utils import data_types
+from clarifai.runners.utils.method_signatures import (build_function_signature, deserialize,
+                                                      get_stream_from_signature, serialize,
+                                                      signatures_to_json)
+
+_METHOD_INFO_ATTR = '_cf_method_info'
+
+_RAISE_EXCEPTIONS = os.getenv("RAISE_EXCEPTIONS", "false").lower() in ("true", "1")
 
 
 class ModelClass(ABC):
+  '''
+  Base class for model classes that can be run as a service.
+
+  Define predict, generate, or stream methods using the @ModelClass.method decorator.
+
+  Example:
+
+    from clarifai.runners.model_class import ModelClass
+    from clarifai.runners.utils.data_types import NamedFields, Stream
+
+    class MyModel(ModelClass):
+
+      @ModelClass.method
+      def predict(self, x: str, y: int) -> List[str]:
+        return [x] * y
+
+      @ModelClass.method
+      def generate(self, x: str, y: int) -> Stream[str]:
+        for i in range(y):
+          yield x + str(i)
+
+      @ModelClass.method
+      def stream(self, input_stream: Stream[NamedFields(x=str, y=int)]) -> Stream[str]:
+        for item in input_stream:
+          yield item.x + ' ' + str(item.y)
+  '''
+
+  @staticmethod
+  def method(func):
+    setattr(func, _METHOD_INFO_ATTR, _MethodInfo(func))
+    return func
+
+  def set_output_context(self, prompt_tokens=None, completion_tokens=None):
+    """This is used to set the prompt and completion tokens in the Output proto"""
+    self._prompt_tokens = prompt_tokens
+    self._completion_tokens = completion_tokens
+
+  def load_model(self):
+    """Load the model."""
+
+  def _handle_get_signatures_request(self) -> service_pb2.MultiOutputResponse:
+    methods = self._get_method_info()
+    signatures = {method.name: method.signature for method in methods.values()}
+    resp = service_pb2.MultiOutputResponse(status=status_pb2.Status(code=status_code_pb2.SUCCESS))
+    output = resp.outputs.add()
+    output.status.code = status_code_pb2.SUCCESS
+    output.data.text.raw = signatures_to_json(signatures)
+    return resp
+
+  def _convert_input_data_to_new_format(
+      self, data: resources_pb2.Data,
+      input_fields: List[resources_pb2.ModelTypeField]) -> resources_pb2.Data:
+    """Convert input data to new format."""
+    new_data = resources_pb2.Data()
+    for field in input_fields:
+      part_data = self._convert_field(data, field)
+      part = new_data.parts.add()
+      part.id = field.name
+      part.data.CopyFrom(part_data)
+    return new_data
+
+  def _convert_field(self, old_data: resources_pb2.Data,
+                     field: resources_pb2.ModelTypeField) -> resources_pb2.Data:
+    data_type = field.type
+    if data_type == resources_pb2.ModelTypeField.DataType.STR:
+      new_data = resources_pb2.Data()
+      new_data.string_value = old_data.text.raw
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.IMAGE:
+      new_data = resources_pb2.Data()
+      new_data.image.CopyFrom(old_data.image)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.VIDEO:
+      new_data = resources_pb2.Data()
+      new_data.video.CopyFrom(old_data.video)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.BOOL:
+      new_data = resources_pb2.Data()
+      new_data.bool_value = old_data.bool_value
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.INT:
+      new_data = resources_pb2.Data()
+      new_data.int_value = old_data.int_value
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.FLOAT:
+      new_data = resources_pb2.Data()
+      new_data.float_value = old_data.float_value
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.BYTES:
+      new_data = resources_pb2.Data()
+      new_data.bytes_value = old_data.bytes_value
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.NDARRAY:
+      new_data = resources_pb2.Data()
+      new_data.ndarray.CopyFrom(old_data.ndarray)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.JSON_DATA:
+      new_data = resources_pb2.Data()
+      struct_dict = old_data.text.raw
+      new_data.string_value = json.dumps(struct_dict)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.TEXT:
+      new_data = resources_pb2.Data()
+      new_data.text.CopyFrom(old_data.text)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.CONCEPT:
+      new_data = resources_pb2.Data()
+      new_data.concepts.extend(old_data.concepts)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.REGION:
+      new_data = resources_pb2.Data()
+      new_data.regions.extend(old_data.regions)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.FRAME:
+      new_data = resources_pb2.Data()
+      new_data.frames.extend(old_data.frames)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.AUDIO:
+      new_data = resources_pb2.Data()
+      new_data.audio.CopyFrom(old_data.audio)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.LIST:
+      new_data = resources_pb2.Data()
+      if not field.type_args:
+        raise ValueError("LIST type requires type_args")
+      element_field = field.type_args[0]
+      element_data = self._convert_field(old_data, element_field)
+      part = new_data.parts.add()
+      part.data.CopyFrom(element_data)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.TUPLE:
+      new_data = resources_pb2.Data()
+      for element_field in field.type_args:
+        element_data = self._convert_field(old_data, element_field)
+        part = new_data.parts.add()
+        part.data.CopyFrom(element_data)
+      return new_data
+    elif data_type == resources_pb2.ModelTypeField.DataType.NAMED_FIELDS:
+      new_data = resources_pb2.Data()
+      for named_field in field.type_args:
+        part_data = self._convert_field(old_data, named_field)
+        part = new_data.parts.add()
+        part.id = named_field.name
+        part.data.CopyFrom(part_data)
+      return new_data
+    else:
+      raise ValueError(f"Unsupported data type: {data_type}")
+
+  def is_old_format(self, data: resources_pb2.Data) -> bool:
+    """Check if the Data proto is in the old format (without parts)."""
+    if len(data.parts) > 0:
+      return False  # New format uses parts
+
+    # Check if any singular field is set
+    singular_fields = [
+        'image', 'video', 'metadata', 'geo', 'text', 'audio', 'ndarray', 'int_value',
+        'float_value', 'bytes_value', 'bool_value', 'string_value'
+    ]
+    for field in singular_fields:
+      if data.HasField(field):
+        return True
+
+    # Check if any repeated field has elements
+    repeated_fields = [
+        'concepts', 'colors', 'clusters', 'embeddings', 'regions', 'frames', 'tracks',
+        'time_segments', 'hits', 'heatmaps'
+    ]
+    for field in repeated_fields:
+      if getattr(data, field):
+        return True
+
+    return False
+
+  def _batch_predict(self, method, inputs: List[Dict[str, Any]]) -> List[Any]:
+    """Batch predict method for multiple inputs."""
+    outputs = []
+    for input in inputs:
+      output = method(**input)
+      outputs.append(output)
+    return outputs
+
+  def _batch_generate(self, method, inputs: List[Dict[str, Any]]) -> Iterator[List[Any]]:
+    """Batch generate method for multiple inputs."""
+    generators = [method(**input) for input in inputs]
+    for outputs in itertools.zip_longest(*generators):
+      yield outputs
 
   def predict_wrapper(
       self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
-
-
+    outputs = []
+    try:
+      # TODO add method name field to proto
+      method_name = 'predict'
+      inference_params = get_inference_params(request)
+      if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
+        method_name = request.inputs[0].data.metadata['_method_name']
+      if method_name == '_GET_SIGNATURES':  # special case to fetch signatures, TODO add endpoint for this
+        return self._handle_get_signatures_request()
+      if method_name not in self._get_method_info():
+        raise ValueError(f"Method {method_name} not found in model class")
+      method = getattr(self, method_name)
+      method_info = method._cf_method_info
+      signature = method_info.signature
+      python_param_types = method_info.python_param_types
+      for input in request.inputs:
+        # check if input is in old format
+        if self.is_old_format(input.data):
+          # convert to new format
+          new_data = self._convert_input_data_to_new_format(input.data, signature.input_fields)
+          input.data.CopyFrom(new_data)
+      # convert inputs to python types
+      inputs = self._convert_input_protos_to_python(request.inputs, inference_params,
+                                                    signature.input_fields, python_param_types)
+      if len(inputs) == 1:
+        inputs = inputs[0]
+        output = method(**inputs)
+        outputs.append(self._convert_output_to_proto(output, signature.output_fields))
+      else:
+        outputs = self._batch_predict(method, inputs)
+        outputs = [
+            self._convert_output_to_proto(output, signature.output_fields) for output in outputs
+        ]
+
+      return service_pb2.MultiOutputResponse(
+          outputs=outputs, status=status_pb2.Status(code=status_code_pb2.SUCCESS))
+    except Exception as e:
+      if _RAISE_EXCEPTIONS:
+        raise
+      logging.exception("Error in predict")
+      return service_pb2.MultiOutputResponse(status=status_pb2.Status(
+          code=status_code_pb2.FAILURE,
+          details=str(e),
+          stack_trace=traceback.format_exc().split('\n')))
 
   def generate_wrapper(self, request: service_pb2.PostModelOutputsRequest
                       ) -> Iterator[service_pb2.MultiOutputResponse]:
-
-
+    try:
+      method_name = 'generate'
+      inference_params = get_inference_params(request)
+      if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
+        method_name = request.inputs[0].data.metadata['_method_name']
+      method = getattr(self, method_name)
+      method_info = method._cf_method_info
+      signature = method_info.signature
+      python_param_types = method_info.python_param_types
+      for input in request.inputs:
+        # check if input is in old format
+        if self.is_old_format(input.data):
+          # convert to new format
+          new_data = self._convert_input_data_to_new_format(input.data, signature.input_fields)
+          input.data.CopyFrom(new_data)
+      inputs = self._convert_input_protos_to_python(request.inputs, inference_params,
+                                                    signature.input_fields, python_param_types)
+      if len(inputs) == 1:
+        inputs = inputs[0]
+        for output in method(**inputs):
+          resp = service_pb2.MultiOutputResponse()
+          self._convert_output_to_proto(output, signature.output_fields, proto=resp.outputs.add())
+          resp.status.code = status_code_pb2.SUCCESS
+          yield resp
+      else:
+        for outputs in self._batch_generate(method, inputs):
+          resp = service_pb2.MultiOutputResponse()
+          for output in outputs:
+            self._convert_output_to_proto(
+                output, signature.output_fields, proto=resp.outputs.add())
+          resp.status.code = status_code_pb2.SUCCESS
+          yield resp
+    except Exception as e:
+      if _RAISE_EXCEPTIONS:
+        raise
+      logging.exception("Error in generate")
+      yield service_pb2.MultiOutputResponse(status=status_pb2.Status(
+          code=status_code_pb2.FAILURE,
+          details=str(e),
+          stack_trace=traceback.format_exc().split('\n')))
 
-  def stream_wrapper(self,
+  def stream_wrapper(self, request_iterator: Iterator[service_pb2.PostModelOutputsRequest]
                     ) -> Iterator[service_pb2.MultiOutputResponse]:
-
-
+    try:
+      request = next(request_iterator)  # get first request to determine method
+      assert len(request.inputs) == 1, "Streaming requires exactly one input"
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      method_name = 'stream'
+      inference_params = get_inference_params(request)
+      if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
+        method_name = request.inputs[0].data.metadata['_method_name']
+      method = getattr(self, method_name)
+      method_info = method._cf_method_info
+      signature = method_info.signature
+      python_param_types = method_info.python_param_types
+
+      # find the streaming vars in the signature
+      stream_sig = get_stream_from_signature(signature.input_fields)
+      if stream_sig is None:
+        raise ValueError("Streaming method must have a Stream input")
+      stream_argname = stream_sig.name
+
+      for input in request.inputs:
+        # check if input is in old format
+        if self.is_old_format(input.data):
+          # convert to new format
+          new_data = self._convert_input_data_to_new_format(input.data, signature.input_fields)
+          input.data.CopyFrom(new_data)
+      # convert all inputs for the first request, including the first stream value
+      inputs = self._convert_input_protos_to_python(request.inputs, inference_params,
+                                                    signature.input_fields, python_param_types)
+      kwargs = inputs[0]
+
+      # first streaming item
+      first_item = kwargs.pop(stream_argname)
+
+      # streaming generator
+      def InputStream():
+        yield first_item
+        # subsequent streaming items contain only the streaming input
+        for request in request_iterator:
+          item = self._convert_input_protos_to_python(request.inputs, inference_params,
+                                                      [stream_sig], python_param_types)
+          item = item[0][stream_argname]
+          yield item
+
+      # add stream generator back to the input kwargs
+      kwargs[stream_argname] = InputStream()
+
+      for output in method(**kwargs):
+        resp = service_pb2.MultiOutputResponse()
+        self._convert_output_to_proto(output, signature.output_fields, proto=resp.outputs.add())
+        resp.status.code = status_code_pb2.SUCCESS
+        yield resp
+    except Exception as e:
+      if _RAISE_EXCEPTIONS:
+        raise
+      logging.exception("Error in stream")
+      yield service_pb2.MultiOutputResponse(status=status_pb2.Status(
+          code=status_code_pb2.FAILURE,
+          details=str(e),
+          stack_trace=traceback.format_exc().split('\n')))
+
+  def _convert_input_protos_to_python(self, inputs: List[resources_pb2.Input],
+                                      inference_params: dict,
+                                      variables_signature: List[resources_pb2.ModelTypeField],
+                                      python_param_types) -> List[Dict[str, Any]]:
+    result = []
+    for input in inputs:
+      kwargs = deserialize(input.data, variables_signature, inference_params)
+      # dynamic cast to annotated types
+      for k, v in kwargs.items():
+        if k not in python_param_types:
+          continue
+
+        if hasattr(python_param_types[k], "__args__") and getattr(
+            python_param_types[k], "__origin__", None) == data_types.Stream:
+          # get the type of the items in the stream
+          stream_type = python_param_types[k].__args__[0]
+
+          kwargs[k] = data_types.cast(v, stream_type)
+        else:
+          kwargs[k] = data_types.cast(v, python_param_types[k])
+      result.append(kwargs)
+    return result
+
+  def _convert_output_to_proto(self,
+                               output: Any,
+                               variables_signature: List[resources_pb2.ModelTypeField],
+                               proto=None) -> resources_pb2.Output:
+    if proto is None:
+      proto = resources_pb2.Output()
+    serialize({'return': output}, variables_signature, proto.data, is_output=True)
+    proto.status.code = status_code_pb2.SUCCESS
+    if hasattr(self, "_prompt_tokens") and self._prompt_tokens is not None:
+      proto.prompt_tokens = self._prompt_tokens
+    if hasattr(self, "_completion_tokens") and self._completion_tokens is not None:
+      proto.completion_tokens = self._completion_tokens
+    self._prompt_tokens = None
+    self._completion_tokens = None
+    return proto
+
+  @classmethod
+  def _register_model_methods(cls):
+    # go up the class hierarchy to find all decorated methods, and add to registry of current class
+    methods = {}
+    for base in reversed(cls.__mro__):
+      for name, method in base.__dict__.items():
+        method_info = getattr(method, _METHOD_INFO_ATTR, None)
+        if not method_info:  # regular function, not a model method
+          continue
+        methods[name] = method_info
+    # check for generic predict(request) -> response, etc. methods
+    #for name in ('predict', 'generate', 'stream'):
+    #  if hasattr(cls, name):
+    #    method = getattr(cls, name)
+    #    if not hasattr(method, _METHOD_INFO_ATTR):  # not already put in registry
+    #      methods[name] = _MethodInfo(method)
+    # set method table for this class in the registry
+    return methods
+
+  @classmethod
+  def _get_method_info(cls, func_name=None):
+    if not hasattr(cls, _METHOD_INFO_ATTR):
+      setattr(cls, _METHOD_INFO_ATTR, cls._register_model_methods())
+    method_info = getattr(cls, _METHOD_INFO_ATTR)
+    if func_name:
+      return method_info[func_name]
+    return method_info
+
+
+# Helper function to get the inference params
+def get_inference_params(request) -> dict:
+  """Get the inference params from the request."""
+  inference_params = {}
+  if request.model.model_version.id != "":
+    output_info = request.model.model_version.output_info
+    output_info = json_format.MessageToDict(output_info, preserving_proto_field_name=True)
+    if "params" in output_info:
+      inference_params = output_info["params"]
+  return inference_params
+
+
+class _MethodInfo:
+
+  def __init__(self, method):
+    self.name = method.__name__
+    self.signature = build_function_signature(method)
+    self.python_param_types = {
+        p.name: p.annotation
+        for p in inspect.signature(method).parameters.values()
+        if p.annotation != inspect.Parameter.empty
+    }
+    self.python_param_types.pop('self', None)
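Read together with the hunk above: subclasses now mark their entry points with the @ModelClass.method decorator, and predict_wrapper / generate_wrapper / stream_wrapper dispatch to the chosen method via the '_method_name' key in the first input's data.metadata (the special value '_GET_SIGNATURES' returns the JSON-serialized method signatures instead of running inference). A minimal sketch of a subclass in the spirit of the class docstring; the class name, argument names, and method bodies below are illustrative, not part of the package:

# Sketch only: a toy model against the decorator-based ModelClass shown above.
# Import paths follow the file layout of this wheel; MyTextModel itself is hypothetical.
from typing import List

from clarifai.runners.models.model_class import ModelClass
from clarifai.runners.utils.data_types import Stream


class MyTextModel(ModelClass):

  def load_model(self):
    # load weights/tokenizers here; the base class implementation is a no-op
    pass

  @ModelClass.method
  def predict(self, prompt: str, copies: int) -> List[str]:
    # unary call, served by predict_wrapper
    return [prompt] * copies

  @ModelClass.method
  def generate(self, prompt: str, copies: int) -> Stream[str]:
    # server streaming, served by generate_wrapper; each yield becomes one output
    for i in range(copies):
      yield f"{prompt} {i}"

At request time the wrapper converts any old-format Data proto to the new parts-based layout, deserializes the arguments, casts them to the annotated Python types, and serializes each return value back into an Output proto, as implemented in the hunk above.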
clarifai/runners/models/model_class_refract.py
@@ -0,0 +1,80 @@
+import inspect
+from abc import ABC, abstractmethod
+from concurrent.futures import ThreadPoolExecutor
+from typing import Any, Dict, Iterator, List, get_type_hints
+
+from clarifai_grpc.grpc.api import resources_pb2, service_pb2
+
+from clarifai.runners.utils.data_handler import Output, kwargs_to_proto, proto_to_kwargs
+
+
+class ModelClass(ABC):
+
+  @abstractmethod
+  def load_model(self):
+    raise NotImplementedError("load_model() not implemented")
+
+  @abstractmethod
+  def predict(self, *args, **kwargs) -> Output:
+    raise NotImplementedError("predict() not implemented")
+
+  @abstractmethod
+  def generate(self, *args, **kwargs) -> Iterator[Output]:
+    raise NotImplementedError("generate() not implemented")
+
+  @abstractmethod
+  def stream(self, *args, **kwargs) -> Iterator[Output]:
+    raise NotImplementedError("stream() not implemented")
+
+  def batch_predict(self, inputs: List[Dict[str, Any]]) -> List[Output]:
+    with ThreadPoolExecutor() as executor:
+      return list(executor.map(lambda x: self.predict(**x), inputs))
+
+  def _process_request(self, request, process_func, is_stream=False):
+    inputs = self._convert_proto_to_python(request.inputs)
+    if len(inputs) == 1:
+      result = process_func(**inputs[0])
+      if is_stream:
+        return (self._convert_output_to_proto(output) for output in result)
+      else:
+        return [self._convert_output_to_proto(result)]
+    else:
+      results = self.batch_predict(inputs) if not is_stream else []
+      return [self._convert_output_to_proto(output) for output in results]
+
+  def predict_wrapper(
+      self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
+    outputs = self._process_request(request, self.predict)
+    return service_pb2.MultiOutputResponse(outputs=outputs)
+
+  def generate_wrapper(self, request: service_pb2.PostModelOutputsRequest
+                      ) -> Iterator[service_pb2.MultiOutputResponse]:
+    outputs = self._process_request(request, self.generate, is_stream=True)
+    for output in outputs:
+      yield service_pb2.MultiOutputResponse(outputs=[output])
+
+  def stream_wrapper(self, requests: Iterator[service_pb2.PostModelOutputsRequest]
+                    ) -> Iterator[service_pb2.MultiOutputResponse]:
+    for request in requests:
+      outputs = self._process_request(request, self.stream, is_stream=True)
+      yield service_pb2.MultiOutputResponse(outputs=outputs)
+
+  def _convert_proto_to_python(self, inputs: List[resources_pb2.Input]) -> List[Dict[str, Any]]:
+    get_type_hints(self.predict)
+    required_params = [
+        name for name, param in inspect.signature(self.predict).parameters.items()
+        if param.default == inspect.Parameter.empty
+    ]
+    kwargs_list = []
+    for input_proto in inputs:
+      kwargs = proto_to_kwargs(input_proto.data)
+      missing = [name for name in required_params if name not in kwargs]
+      if missing:
+        raise ValueError(f"Missing required parameters: {missing}")
+      kwargs_list.append(kwargs)
+    return kwargs_list
+
+  def _convert_output_to_proto(self, output: Any) -> resources_pb2.Output:
+    if isinstance(output, Output):
+      return output.to_proto()
+    return kwargs_to_proto(**output).outputs.add()
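model_class_refract.py, shown above, is a smaller abstract variant of the same wrapper idea that ships alongside the main implementation: subclasses implement predict, generate, and stream directly, and proto conversion is delegated to Output, proto_to_kwargs, and kwargs_to_proto from clarifai.runners.utils.data_handler. A hedged sketch of such a subclass; it assumes Output accepts a simple keyword field like text, which should be verified against data_handler.py in this wheel:

# Sketch only: EchoModel is hypothetical, and the Output(text=...) keyword is an assumption.
from typing import Iterator

from clarifai.runners.models.model_class_refract import ModelClass
from clarifai.runners.utils.data_handler import Output


class EchoModel(ModelClass):

  def load_model(self):
    # nothing to load for this toy example
    pass

  def predict(self, text: str = "") -> Output:
    # keyword arguments are produced by proto_to_kwargs() in _convert_proto_to_python()
    return Output(text=text.upper())

  def generate(self, text: str = "") -> Iterator[Output]:
    for token in text.split():
      yield Output(text=token)

  def stream(self, text: str = "") -> Iterator[Output]:
    yield Output(text=text[::-1])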