clarifai 11.1.5__py3-none-any.whl → 11.1.5rc6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (123)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  4. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  5. clarifai/cli/__main__.py~ +4 -0
  6. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/__main__.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  11. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  12. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  13. clarifai/cli/model.py +25 -0
  14. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  15. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  16. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  17. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  18. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  19. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  21. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
  23. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  24. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  25. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  26. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  27. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  28. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  29. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  30. clarifai/client/model.py +95 -362
  31. clarifai/client/model_client.py +432 -0
  32. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  33. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  34. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  35. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  36. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  37. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  38. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  39. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  40. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  41. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  42. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  43. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  44. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-310.pyc +0 -0
  45. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-310.pyc +0 -0
  46. clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
  47. clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
  48. clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
  49. clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  50. clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
  51. clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
  52. clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
  53. clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
  54. clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
  55. clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
  56. clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
  57. clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
  58. clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
  59. clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
  60. clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
  61. clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
  62. clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
  63. clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
  64. clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
  65. clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
  66. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  67. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  68. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  69. clarifai/runners/__init__.py +2 -7
  70. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  71. clarifai/runners/__pycache__/server.cpython-310.pyc +0 -0
  72. clarifai/runners/dockerfile_template/Dockerfile.debug +11 -0
  73. clarifai/runners/dockerfile_template/Dockerfile.debug~ +9 -0
  74. clarifai/runners/dockerfile_template/Dockerfile.template +3 -0
  75. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  76. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  77. clarifai/runners/models/__pycache__/model_builder.cpython-310.pyc +0 -0
  78. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  79. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  80. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  81. clarifai/runners/models/__pycache__/model_servicer.cpython-310.pyc +0 -0
  82. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  83. clarifai/runners/models/model_builder.py +33 -7
  84. clarifai/runners/models/model_class.py +273 -28
  85. clarifai/runners/models/model_run_locally.py +3 -78
  86. clarifai/runners/models/model_runner.py +2 -0
  87. clarifai/runners/models/model_servicer.py +11 -2
  88. clarifai/runners/server.py +5 -1
  89. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  90. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  91. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  92. clarifai/runners/utils/__pycache__/data_types.cpython-310.pyc +0 -0
  93. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  94. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  95. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  96. clarifai/runners/utils/__pycache__/method_signatures.cpython-310.pyc +0 -0
  97. clarifai/runners/utils/__pycache__/serializers.cpython-310.pyc +0 -0
  98. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  99. clarifai/runners/utils/data_handler.py +308 -205
  100. clarifai/runners/utils/data_types.py +334 -0
  101. clarifai/runners/utils/method_signatures.py +452 -0
  102. clarifai/runners/utils/serializers.py +132 -0
  103. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  104. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  105. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  106. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  107. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  108. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  109. clarifai/utils/evaluation/__pycache__/__init__.cpython-310.pyc +0 -0
  110. clarifai/utils/evaluation/__pycache__/helpers.cpython-310.pyc +0 -0
  111. clarifai/utils/evaluation/__pycache__/main.cpython-310.pyc +0 -0
  112. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  113. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  114. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  115. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  116. {clarifai-11.1.5.dist-info → clarifai-11.1.5rc6.dist-info}/METADATA +16 -26
  117. clarifai-11.1.5rc6.dist-info/RECORD +203 -0
  118. {clarifai-11.1.5.dist-info → clarifai-11.1.5rc6.dist-info}/WHEEL +1 -1
  119. clarifai/runners/models/base_typed_model.py +0 -238
  120. clarifai-11.1.5.dist-info/RECORD +0 -101
  121. {clarifai-11.1.5.dist-info → clarifai-11.1.5rc6.dist-info}/LICENSE +0 -0
  122. {clarifai-11.1.5.dist-info → clarifai-11.1.5rc6.dist-info}/entry_points.txt +0 -0
  123. {clarifai-11.1.5.dist-info → clarifai-11.1.5rc6.dist-info}/top_level.txt +0 -0
@@ -14,13 +14,14 @@ from google.protobuf import json_format
14
14
  from rich import print
15
15
  from rich.markup import escape
16
16
 
17
- from clarifai.client import BaseClient
17
+ from clarifai.client.base import BaseClient
18
18
  from clarifai.runners.models.model_class import ModelClass
19
19
  from clarifai.runners.utils.const import (
20
20
  AVAILABLE_PYTHON_IMAGES, AVAILABLE_TORCH_IMAGES, CONCEPTS_REQUIRED_MODEL_TYPE,
21
21
  DEFAULT_DOWNLOAD_CHECKPOINT_WHEN, DEFAULT_PYTHON_VERSION, DEFAULT_RUNTIME_DOWNLOAD_PATH,
22
22
  PYTHON_BASE_IMAGE, TORCH_BASE_IMAGE)
23
23
  from clarifai.runners.utils.loader import HuggingFaceLoader
24
+ from clarifai.runners.utils.method_signatures import signatures_to_yaml
24
25
  from clarifai.urls.helper import ClarifaiUrlHelper
25
26
  from clarifai.utils.logging import logger
26
27
  from clarifai.versions import CLIENT_VERSION
@@ -69,6 +70,18 @@ class ModelBuilder:
69
70
  """
70
71
  Create an instance of the model class, as specified in the config file.
71
72
  """
73
+ model_class = self.load_model_class()
74
+
75
+ # initialize the model
76
+ model = model_class()
77
+ if load_model:
78
+ model.load_model()
79
+ return model
80
+
81
+ def load_model_class(self):
82
+ """
83
+ Import the model class from the model.py file.
84
+ """
72
85
  # look for default model.py file location
73
86
  for loc in ["model.py", "1/model.py"]:
74
87
  model_file = os.path.join(self.folder, loc)
@@ -107,12 +120,7 @@ class ModelBuilder:
107
120
  "Could not determine model class. There should be exactly one model inheriting from ModelClass defined in the model.py"
108
121
  )
109
122
  model_class = classes[0]
110
-
111
- # initialize the model
112
- model = model_class()
113
- if load_model:
114
- model.load_model()
115
- return model
123
+ return model_class
116
124
 
117
125
  def _validate_folder(self, folder):
118
126
  if folder == ".":
@@ -226,6 +234,15 @@ class ModelBuilder:
226
234
  )
227
235
  logger.info("Continuing without Hugging Face token")
228
236
 
237
+ num_threads = self.config.get("num_threads")
238
+ if num_threads or num_threads == 0:
239
+ assert isinstance(num_threads, int) and num_threads >= 1, ValueError(
240
+ f"`num_threads` must be an integer greater than or equal to 1. Received type {type(num_threads)} with value {num_threads}."
241
+ )
242
+ else:
243
+ num_threads = int(os.environ.get("CLARIFAI_NUM_THREADS", 1))
244
+ self.config["num_threads"] = num_threads
245
+
229
246
  @staticmethod
230
247
  def _get_tar_file_content_size(tar_file_path):
231
248
  """
@@ -244,6 +261,15 @@ class ModelBuilder:
244
261
  total_size += member.size
245
262
  return total_size
246
263
 
264
+ def method_signatures_yaml(self):
265
+ """
266
+ Returns the method signatures for the model class in YAML format.
267
+ """
268
+ model_class = self.load_model_class()
269
+ method_info = model_class._get_method_info()
270
+ signatures = {name: m.signature for name, m in method_info.items()}
271
+ return signatures_to_yaml(signatures)
272
+
247
273
  @property
248
274
  def client(self):
249
275
  if self._client is None:
@@ -1,41 +1,286 @@
1
- from abc import ABC, abstractmethod
2
- from typing import Iterator
1
+ import inspect
2
+ import itertools
3
+ import logging
4
+ import os
5
+ import traceback
6
+ from abc import ABC
7
+ from typing import Any, Dict, Iterator, List
3
8
 
4
- from clarifai_grpc.grpc.api import service_pb2
9
+ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
10
+ from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
11
+
12
+ from clarifai.runners.utils import data_types
13
+ from clarifai.runners.utils.method_signatures import (build_function_signature, deserialize,
14
+ get_stream_from_signature, serialize,
15
+ signatures_to_json)
16
+
17
+ _METHOD_INFO_ATTR = '_cf_method_info'
18
+
19
+ _RAISE_EXCEPTIONS = os.getenv("RAISE_EXCEPTIONS", "false").lower() == "true"
20
+
21
+
22
+ class methods:
23
+ '''
24
+ Decorators to mark methods as predict, generate, or stream methods.
25
+ '''
26
+
27
+ @staticmethod
28
+ def predict(method):
29
+ setattr(method, _METHOD_INFO_ATTR, _MethodInfo(method, 'predict'))
30
+ return method
31
+
32
+ @staticmethod
33
+ def generate(method):
34
+ setattr(method, _METHOD_INFO_ATTR, _MethodInfo(method, 'generate'))
35
+ return method
36
+
37
+ @staticmethod
38
+ def stream(method):
39
+ setattr(method, _METHOD_INFO_ATTR, _MethodInfo(method, 'stream'))
40
+ return method
5
41
 
6
42
 
7
43
  class ModelClass(ABC):
44
+ '''
45
+ Base class for model classes that can be run as a service.
46
+
47
+ Define methods as predict, generate, or stream methods using the @methods decorators.
48
+
49
+ Example:
50
+
51
+ from clarifai.runners.model_class import ModelClass, methods
52
+ from clarifai.runners.utils.data_types import Input, Stream
53
+
54
+ class MyModel(ModelClass):
55
+
56
+ @methods.predict
57
+ def predict(self, x: str, y: int) -> List[str]:
58
+ return [x] * y
59
+
60
+ @methods.generate
61
+ def generate(self, x: str, y: int) -> Stream[str]:
62
+ for i in range(y):
63
+ yield x + str(i)
64
+
65
+ @methods.stream
66
+ def stream(self, input_stream: Stream[Input(x=str, y=int)]) -> Stream[str]:
67
+ for item in input_stream:
68
+ yield item.x + ' ' + str(item.y)
69
+ '''
70
+
71
+ def load_model(self):
72
+ """Load the model."""
73
+
74
+ def _handle_get_signatures_request(self) -> service_pb2.MultiOutputResponse:
75
+ methods = self._get_method_info()
76
+ signatures = {method.name: method.signature for method in methods.values()}
77
+ resp = service_pb2.MultiOutputResponse(status=status_pb2.Status(code=status_code_pb2.SUCCESS))
78
+ output = resp.outputs.add()
79
+ output.status.code = status_code_pb2.SUCCESS
80
+ output.data.text.raw = signatures_to_json(signatures)
81
+ return resp
82
+
83
+ def batch_predict(self, method, inputs: List[Dict[str, Any]]) -> List[Any]:
84
+ """Batch predict method for multiple inputs."""
85
+ outputs = []
86
+ for input in inputs:
87
+ output = method(**input)
88
+ outputs.append(output)
89
+ return outputs
90
+
91
+ def batch_generate(self, method, inputs: List[Dict[str, Any]]) -> Iterator[List[Any]]:
92
+ """Batch generate method for multiple inputs."""
93
+ generators = [method(**input) for input in inputs]
94
+ for outputs in itertools.zip_longest(*generators):
95
+ yield outputs
8
96
 
9
97
  def predict_wrapper(
10
98
  self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
11
- """This method is used for input/output proto data conversion"""
12
- return self.predict(request)
99
+ outputs = []
100
+ try:
101
+ # TODO add method name field to proto
102
+ method_name = None
103
+ if len(request.inputs) > 0:
104
+ method_name = request.inputs[0].data.metadata.get('_method_name', None)
105
+ # call_params = dict(request.model.model_version.output_info.params)
106
+ # method_name = call_params.get('_method_name', 'predict')
107
+ if method_name == '_GET_SIGNATURES': # special case to fetch signatures, TODO add endpoint for this
108
+ return self._handle_get_signatures_request()
109
+ if method_name not in self._get_method_info():
110
+ raise ValueError(f"Method {method_name} not found in model class")
111
+ method = getattr(self, method_name)
112
+ method_info = method._cf_method_info
113
+ signature = method_info.signature
114
+ python_param_types = method_info.python_param_types
115
+ inputs = self._convert_input_protos_to_python(request.inputs, signature.inputs,
116
+ python_param_types)
117
+ if len(inputs) == 1:
118
+ inputs = inputs[0]
119
+ output = method(**inputs)
120
+ outputs.append(self._convert_output_to_proto(output, signature.outputs))
121
+ else:
122
+ outputs = self.batch_predict(method, inputs)
123
+ outputs = [self._convert_output_to_proto(output, signature.outputs) for output in outputs]
124
+
125
+ return service_pb2.MultiOutputResponse(
126
+ outputs=outputs, status=status_pb2.Status(code=status_code_pb2.SUCCESS))
127
+ except Exception as e:
128
+ if _RAISE_EXCEPTIONS:
129
+ raise
130
+ logging.exception("Error in predict")
131
+ return service_pb2.MultiOutputResponse(status=status_pb2.Status(
132
+ code=status_code_pb2.FAILURE,
133
+ details=str(e),
134
+ stack_trace=traceback.format_exc().split('\n')))
13
135
 
14
136
  def generate_wrapper(self, request: service_pb2.PostModelOutputsRequest
15
137
  ) -> Iterator[service_pb2.MultiOutputResponse]:
16
- """This method is used for input/output proto data conversion and yield outcome"""
17
- return self.generate(request)
138
+ try:
139
+ call_params = dict(request.model.model_version.output_info.params)
140
+ method_name = call_params.get('_method_name', 'generate')
141
+ method = getattr(self, method_name)
142
+ method_info = method._cf_method_info
143
+ signature = method_info.signature
144
+ python_param_types = method_info.python_param_types
18
145
 
19
- def stream_wrapper(self, request: service_pb2.PostModelOutputsRequest
146
+ inputs = self._convert_input_protos_to_python(request.inputs, signature.inputs,
147
+ python_param_types)
148
+ if len(inputs) == 1:
149
+ inputs = inputs[0]
150
+ for output in method(**inputs):
151
+ resp = service_pb2.MultiOutputResponse()
152
+ self._convert_output_to_proto(output, signature.outputs, proto=resp.outputs.add())
153
+ resp.status.code = status_code_pb2.SUCCESS
154
+ yield resp
155
+ else:
156
+ for outputs in self.batch_generate(method, inputs):
157
+ resp = service_pb2.MultiOutputResponse()
158
+ for output in outputs:
159
+ self._convert_output_to_proto(output, signature.outputs, proto=resp.outputs.add())
160
+ resp.status.code = status_code_pb2.SUCCESS
161
+ yield resp
162
+ except Exception as e:
163
+ if _RAISE_EXCEPTIONS:
164
+ raise
165
+ logging.exception("Error in generate")
166
+ yield service_pb2.MultiOutputResponse(status=status_pb2.Status(
167
+ code=status_code_pb2.FAILURE,
168
+ details=str(e),
169
+ stack_trace=traceback.format_exc().split('\n')))
170
+
171
+ def stream_wrapper(self, request_iterator: Iterator[service_pb2.PostModelOutputsRequest]
20
172
  ) -> Iterator[service_pb2.MultiOutputResponse]:
21
- """This method is used for input/output proto data conversion and yield outcome"""
22
- return self.stream(request)
173
+ try:
174
+ request = next(request_iterator) # get first request to determine method
175
+ assert len(request.inputs) == 1, "Streaming requires exactly one input"
23
176
 
24
- @abstractmethod
25
- def load_model(self):
26
- raise NotImplementedError("load_model() not implemented")
27
-
28
- @abstractmethod
29
- def predict(self,
30
- request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
31
- raise NotImplementedError("run_input() not implemented")
32
-
33
- @abstractmethod
34
- def generate(self, request: service_pb2.PostModelOutputsRequest
35
- ) -> Iterator[service_pb2.MultiOutputResponse]:
36
- raise NotImplementedError("generate() not implemented")
37
-
38
- @abstractmethod
39
- def stream(self, request_iterator: Iterator[service_pb2.PostModelOutputsRequest]
40
- ) -> Iterator[service_pb2.MultiOutputResponse]:
41
- raise NotImplementedError("stream() not implemented")
177
+ call_params = dict(request.model.model_version.output_info.params)
178
+ method_name = call_params.get('_method_name', 'stream')
179
+ method = getattr(self, method_name)
180
+ method_info = method._cf_method_info
181
+ signature = method_info.signature
182
+ python_param_types = method_info.python_param_types
183
+
184
+ # find the streaming vars in the signature
185
+ stream_argname, streaming_var_signatures = get_stream_from_signature(signature.inputs)
186
+
187
+ # convert all inputs for the first request, including the first stream value
188
+ inputs = self._convert_input_protos_to_python(request.inputs, signature.inputs,
189
+ python_param_types)
190
+ kwargs = inputs[0]
191
+
192
+ # first streaming item
193
+ first_item = kwargs.pop(stream_argname)
194
+
195
+ # streaming generator
196
+ def InputStream():
197
+ yield first_item
198
+ # subsequent streaming items contain only the streaming input
199
+ for request in request_iterator:
200
+ item = self._convert_input_protos_to_python(request.inputs, streaming_var_signatures,
201
+ python_param_types)
202
+ item = item[0][stream_argname]
203
+ yield item
204
+
205
+ # add stream generator back to the input kwargs
206
+ kwargs[stream_argname] = InputStream()
207
+
208
+ for output in method(**kwargs):
209
+ resp = service_pb2.MultiOutputResponse()
210
+ self._convert_output_to_proto(output, signature.outputs, proto=resp.outputs.add())
211
+ resp.status.code = status_code_pb2.SUCCESS
212
+ yield resp
213
+ except Exception as e:
214
+ if _RAISE_EXCEPTIONS:
215
+ raise
216
+ logging.exception("Error in stream")
217
+ yield service_pb2.MultiOutputResponse(status=status_pb2.Status(
218
+ code=status_code_pb2.FAILURE,
219
+ details=str(e),
220
+ stack_trace=traceback.format_exc().split('\n')))
221
+
222
+ def _convert_input_protos_to_python(self, inputs: List[resources_pb2.Input], variables_signature,
223
+ python_param_types) -> List[Dict[str, Any]]:
224
+ result = []
225
+ for input in inputs:
226
+ kwargs = deserialize(input.data, variables_signature)
227
+ # dynamic cast to annotated types
228
+ for k, v in kwargs.items():
229
+ if k not in python_param_types:
230
+ continue
231
+ kwargs[k] = data_types.cast(v, python_param_types[k])
232
+ result.append(kwargs)
233
+ return result
234
+
235
+ def _convert_output_to_proto(self, output: Any, variables_signature,
236
+ proto=None) -> resources_pb2.Output:
237
+ if proto is None:
238
+ proto = resources_pb2.Output()
239
+ if isinstance(output, tuple):
240
+ output = {f'return.{i}': item for i, item in enumerate(output)}
241
+ if not isinstance(output, dict): # TODO Output type, not just dict
242
+ output = {'return': output}
243
+ serialize(output, variables_signature, proto.data, is_output=True)
244
+ proto.status.code = status_code_pb2.SUCCESS
245
+ return proto
246
+
247
+ @classmethod
248
+ def _register_model_methods(cls):
249
+ # go up the class hierarchy to find all decorated methods, and add to registry of current class
250
+ methods = {}
251
+ for base in reversed(cls.__mro__):
252
+ for name, method in base.__dict__.items():
253
+ method_info = getattr(method, _METHOD_INFO_ATTR, None)
254
+ if not method_info: # regular function, not a model method
255
+ continue
256
+ methods[name] = method_info
257
+ # check for generic predict(request) -> response, etc. methods
258
+ #for name in ('predict', 'generate', 'stream'):
259
+ # if hasattr(cls, name):
260
+ # method = getattr(cls, name)
261
+ # if not hasattr(method, _METHOD_INFO_ATTR): # not already put in registry
262
+ # methods[name] = _MethodInfo(method, method_type=name)
263
+ # set method table for this class in the registry
264
+ return methods
265
+
266
+ @classmethod
267
+ def _get_method_info(cls, func_name=None):
268
+ if not hasattr(cls, _METHOD_INFO_ATTR):
269
+ setattr(cls, _METHOD_INFO_ATTR, cls._register_model_methods())
270
+ method_info = getattr(cls, _METHOD_INFO_ATTR)
271
+ if func_name:
272
+ return method_info[func_name]
273
+ return method_info
274
+
275
+
276
+ class _MethodInfo:
277
+
278
+ def __init__(self, method, method_type):
279
+ self.name = method.__name__
280
+ self.signature = build_function_signature(method, method_type)
281
+ self.python_param_types = {
282
+ p.name: p.annotation
283
+ for p in inspect.signature(method).parameters.values()
284
+ if p.annotation != inspect.Parameter.empty
285
+ }
286
+ self.python_param_types.pop('self', None)
@@ -7,14 +7,11 @@ import subprocess
7
7
  import sys
8
8
  import tempfile
9
9
  import time
10
- import traceback
11
10
  import venv
12
11
 
13
12
  from clarifai_grpc.grpc.api import resources_pb2, service_pb2
14
- from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
15
13
 
16
14
  from clarifai.runners.models.model_builder import ModelBuilder
17
- from clarifai.runners.utils.url_fetcher import ensure_urls_downloaded
18
15
  from clarifai.utils.logging import logger
19
16
 
20
17
 
@@ -111,85 +108,13 @@ class ModelRunLocally:
111
108
  for i in range(1):
112
109
  yield request
113
110
 
114
- def _run_model_inference(self, model):
115
- """Perform inference using the model."""
116
- request = self._build_request()
117
- stream_request = self._build_stream_request()
118
-
119
- ensure_urls_downloaded(request)
120
- predict_response = None
121
- generate_response = None
122
- stream_response = None
123
- try:
124
- predict_response = model.predict(request)
125
- except NotImplementedError:
126
- logger.info("Model does not implement predict() method.")
127
- except Exception as e:
128
- logger.error(f"Model Prediction failed: {e}")
129
- traceback.print_exc()
130
- predict_response = service_pb2.MultiOutputResponse(status=status_pb2.Status(
131
- code=status_code_pb2.MODEL_PREDICTION_FAILED,
132
- description="Prediction failed",
133
- details="",
134
- internal_details=str(e),
135
- ))
136
-
137
- if predict_response:
138
- if predict_response.outputs[0].status.code != status_code_pb2.SUCCESS:
139
- logger.error(f"Moddel Prediction failed: {predict_response}")
140
- else:
141
- logger.info(f"Model Prediction succeeded: {predict_response}")
142
-
143
- try:
144
- generate_response = model.generate(request)
145
- except NotImplementedError:
146
- logger.info("Model does not implement generate() method.")
147
- except Exception as e:
148
- logger.error(f"Model Generation failed: {e}")
149
- traceback.print_exc()
150
- generate_response = service_pb2.MultiOutputResponse(status=status_pb2.Status(
151
- code=status_code_pb2.MODEL_GENERATION_FAILED,
152
- description="Generation failed",
153
- details="",
154
- internal_details=str(e),
155
- ))
156
-
157
- if generate_response:
158
- generate_first_res = next(generate_response)
159
- if generate_first_res.outputs[0].status.code != status_code_pb2.SUCCESS:
160
- logger.error(f"Moddel Prediction failed: {generate_first_res}")
161
- else:
162
- logger.info(
163
- f"Model Prediction succeeded for generate and first response: {generate_first_res}")
164
-
165
- try:
166
- stream_response = model.stream(stream_request)
167
- except NotImplementedError:
168
- logger.info("Model does not implement stream() method.")
169
- except Exception as e:
170
- logger.error(f"Model Stream failed: {e}")
171
- traceback.print_exc()
172
- stream_response = service_pb2.MultiOutputResponse(status=status_pb2.Status(
173
- code=status_code_pb2.MODEL_STREAM_FAILED,
174
- description="Stream failed",
175
- details="",
176
- internal_details=str(e),
177
- ))
178
-
179
- if stream_response:
180
- stream_first_res = next(stream_response)
181
- if stream_first_res.outputs[0].status.code != status_code_pb2.SUCCESS:
182
- logger.error(f"Moddel Prediction failed: {stream_first_res}")
183
- else:
184
- logger.info(
185
- f"Model Prediction succeeded for stream and first response: {stream_first_res}")
186
-
187
111
  def _run_test(self):
188
112
  """Test the model locally by making a prediction."""
189
113
  # Create the model
190
114
  model = self.builder.create_model_instance()
191
- # send an inference.
192
- self._run_model_inference(model)
115
+ # call its test method, if it has one
116
+ if hasattr(model, "test"):
117
+ model.test()
193
118
 
194
119
  def test_model(self):
195
120
  """Test the model by running it locally in the virtual environment."""
@@ -82,6 +82,8 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
82
82
  ensure_urls_downloaded(request)
83
83
 
84
84
  resp = self.model.predict_wrapper(request)
85
+ if resp.status.code != status_code_pb2.SUCCESS:
86
+ return service_pb2.RunnerItemOutput(multi_output_response=resp)
85
87
  successes = [o.status.code == status_code_pb2.SUCCESS for o in resp.outputs]
86
88
  if all(successes):
87
89
  status = status_pb2.Status(
@@ -1,3 +1,4 @@
1
+ import os
1
2
  from itertools import tee
2
3
  from typing import Iterator
3
4
 
@@ -6,6 +7,8 @@ from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
6
7
 
7
8
  from ..utils.url_fetcher import ensure_urls_downloaded
8
9
 
10
+ _RAISE_EXCEPTIONS = os.getenv("RAISE_EXCEPTIONS", "false").lower() in ("true", "1")
11
+
9
12
 
10
13
  class ModelServicer(service_pb2_grpc.V2Servicer):
11
14
  """
@@ -33,6 +36,8 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
33
36
  try:
34
37
  return self.model.predict_wrapper(request)
35
38
  except Exception as e:
39
+ if _RAISE_EXCEPTIONS:
40
+ raise
36
41
  return service_pb2.MultiOutputResponse(status=status_pb2.Status(
37
42
  code=status_code_pb2.MODEL_PREDICTION_FAILED,
38
43
  description="Failed",
@@ -50,8 +55,10 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
50
55
  ensure_urls_downloaded(request)
51
56
 
52
57
  try:
53
- return self.model.generate_wrapper(request)
58
+ yield from self.model.generate_wrapper(request)
54
59
  except Exception as e:
60
+ if _RAISE_EXCEPTIONS:
61
+ raise
55
62
  yield service_pb2.MultiOutputResponse(status=status_pb2.Status(
56
63
  code=status_code_pb2.MODEL_PREDICTION_FAILED,
57
64
  description="Failed",
@@ -74,8 +81,10 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
74
81
  ensure_urls_downloaded(req)
75
82
 
76
83
  try:
77
- return self.model.stream_wrapper(request_copy)
84
+ yield from self.model.stream_wrapper(request_copy)
78
85
  except Exception as e:
86
+ if _RAISE_EXCEPTIONS:
87
+ raise
79
88
  yield service_pb2.MultiOutputResponse(status=status_pb2.Status(
80
89
  code=status_code_pb2.MODEL_PREDICTION_FAILED,
81
90
  description="Failed",
@@ -85,6 +85,10 @@ def serve(model_path,
85
85
 
86
86
  model = builder.create_model_instance()
87
87
 
88
+ # `num_threads` can be set in config.yaml or via the environment variable CLARIFAI_NUM_THREADS="<integer>".
89
+ # Note: The value in config.yaml takes precedence over the environment variable.
90
+ num_threads = builder.config.get("num_threads")
91
+
88
92
  # Setup the grpc server for local development.
89
93
  if grpc:
90
94
 
@@ -115,7 +119,7 @@ def serve(model_path,
115
119
  nodepool_id=os.environ["CLARIFAI_NODEPOOL_ID"],
116
120
  compute_cluster_id=os.environ["CLARIFAI_COMPUTE_CLUSTER_ID"],
117
121
  base_url=os.environ.get("CLARIFAI_API_BASE", "https://api.clarifai.com"),
118
- num_parallel_polls=int(os.environ.get("CLARIFAI_NUM_THREADS", 1)),
122
+ num_parallel_polls=num_threads,
119
123
  )
120
124
  runner.start() # start the runner to fetch work from the API.
121
125