clarifai 11.2.3rc1__py3-none-any.whl → 11.2.3rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (248)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  4. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  5. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  6. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  11. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  12. clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  13. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  14. clarifai/cli/base.py +81 -228
  15. clarifai/cli/compute_cluster.py +17 -25
  16. clarifai/cli/deployment.py +41 -67
  17. clarifai/cli/model.py +39 -26
  18. clarifai/cli/nodepool.py +40 -59
  19. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  21. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  23. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  24. clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  25. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  26. clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
  27. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  28. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  29. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  30. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  31. clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
  32. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  33. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  34. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  35. clarifai/client/app.py +1 -1
  36. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  37. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  38. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  39. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  40. clarifai/client/cli/__init__.py +0 -0
  41. clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  42. clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  43. clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  44. clarifai/client/cli/base_cli.py +88 -0
  45. clarifai/client/cli/model_cli.py +29 -0
  46. clarifai/client/model.py +159 -393
  47. clarifai/client/model_client.py +502 -0
  48. clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
  49. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  50. clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
  51. clarifai/constants/__pycache__/{model.cpython-312.pyc → model.cpython-310.pyc} +0 -0
  52. clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
  53. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  54. clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
  55. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  56. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  57. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  58. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  59. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  60. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  61. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  62. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  63. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  64. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  65. clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
  66. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  67. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  68. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
  69. clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
  70. clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
  71. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  72. clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
  73. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  74. clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
  75. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  76. clarifai/runners/__init__.py +2 -7
  77. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  78. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  79. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
  80. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +42 -0
  81. clarifai/runners/dockerfile_template/Dockerfile.nim +71 -0
  82. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  83. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  84. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  85. clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
  86. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  87. clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
  88. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  89. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  90. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  91. clarifai/runners/models/model_builder.py +138 -51
  92. clarifai/runners/models/model_class.py +441 -28
  93. clarifai/runners/models/model_class_refract.py +80 -0
  94. clarifai/runners/models/model_run_locally.py +25 -89
  95. clarifai/runners/models/model_runner.py +8 -0
  96. clarifai/runners/models/model_servicer.py +11 -2
  97. clarifai/runners/models/model_upload.py +607 -0
  98. clarifai/runners/models/temp.py +25 -0
  99. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  100. clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
  101. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  102. clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
  103. clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
  104. clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
  105. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  106. clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
  107. clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
  108. clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
  109. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  110. clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
  111. clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
  112. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  113. clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
  114. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  115. clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
  116. clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
  117. clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
  118. clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
  119. clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
  120. clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
  121. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  122. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  123. clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
  124. clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
  125. clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
  126. clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
  127. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  128. clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
  129. clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
  130. clarifai/runners/utils/code_script.py +217 -0
  131. clarifai/runners/utils/const.py +8 -9
  132. clarifai/runners/utils/data_handler.py +271 -210
  133. clarifai/runners/utils/data_handler_refract.py +213 -0
  134. clarifai/runners/utils/data_types.py +473 -0
  135. clarifai/runners/utils/data_utils.py +165 -0
  136. clarifai/runners/utils/loader.py +6 -36
  137. clarifai/runners/utils/logger.py +0 -0
  138. clarifai/runners/utils/method_signatures.py +518 -0
  139. clarifai/runners/utils/serializers.py +222 -0
  140. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  141. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  142. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  143. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  144. clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
  145. clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
  146. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  147. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  148. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  149. clarifai/utils/cli.py +33 -132
  150. clarifai/utils/constants.py +0 -4
  151. clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
  152. clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
  153. clarifai/utils/misc.py +0 -2
  154. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  155. clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
  156. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  157. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  158. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  159. {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc2.dist-info}/METADATA +14 -3
  160. clarifai-11.2.3rc2.dist-info/RECORD +238 -0
  161. {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc2.dist-info}/WHEEL +1 -1
  162. clarifai/__pycache__/__init__.cpython-312.pyc +0 -0
  163. clarifai/__pycache__/errors.cpython-312.pyc +0 -0
  164. clarifai/__pycache__/versions.cpython-312.pyc +0 -0
  165. clarifai/cli/__pycache__/__init__.cpython-312.pyc +0 -0
  166. clarifai/cli/__pycache__/base.cpython-312.pyc +0 -0
  167. clarifai/cli/__pycache__/compute_cluster.cpython-312.pyc +0 -0
  168. clarifai/cli/__pycache__/deployment.cpython-312.pyc +0 -0
  169. clarifai/cli/__pycache__/model.cpython-312.pyc +0 -0
  170. clarifai/cli/__pycache__/nodepool.cpython-312.pyc +0 -0
  171. clarifai/client/__pycache__/__init__.cpython-312.pyc +0 -0
  172. clarifai/client/__pycache__/app.cpython-312.pyc +0 -0
  173. clarifai/client/__pycache__/base.cpython-312.pyc +0 -0
  174. clarifai/client/__pycache__/compute_cluster.cpython-312.pyc +0 -0
  175. clarifai/client/__pycache__/dataset.cpython-312.pyc +0 -0
  176. clarifai/client/__pycache__/deployment.cpython-312.pyc +0 -0
  177. clarifai/client/__pycache__/input.cpython-312.pyc +0 -0
  178. clarifai/client/__pycache__/lister.cpython-312.pyc +0 -0
  179. clarifai/client/__pycache__/model.cpython-312.pyc +0 -0
  180. clarifai/client/__pycache__/model_client.cpython-312.pyc +0 -0
  181. clarifai/client/__pycache__/module.cpython-312.pyc +0 -0
  182. clarifai/client/__pycache__/nodepool.cpython-312.pyc +0 -0
  183. clarifai/client/__pycache__/search.cpython-312.pyc +0 -0
  184. clarifai/client/__pycache__/user.cpython-312.pyc +0 -0
  185. clarifai/client/__pycache__/workflow.cpython-312.pyc +0 -0
  186. clarifai/client/auth/__pycache__/__init__.cpython-312.pyc +0 -0
  187. clarifai/client/auth/__pycache__/helper.cpython-312.pyc +0 -0
  188. clarifai/client/auth/__pycache__/register.cpython-312.pyc +0 -0
  189. clarifai/client/auth/__pycache__/stub.cpython-312.pyc +0 -0
  190. clarifai/constants/__pycache__/base.cpython-312.pyc +0 -0
  191. clarifai/constants/__pycache__/dataset.cpython-312.pyc +0 -0
  192. clarifai/constants/__pycache__/input.cpython-312.pyc +0 -0
  193. clarifai/constants/__pycache__/search.cpython-312.pyc +0 -0
  194. clarifai/constants/__pycache__/workflow.cpython-312.pyc +0 -0
  195. clarifai/datasets/__pycache__/__init__.cpython-312.pyc +0 -0
  196. clarifai/datasets/export/__pycache__/__init__.cpython-312.pyc +0 -0
  197. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-312.pyc +0 -0
  198. clarifai/datasets/upload/__pycache__/__init__.cpython-312.pyc +0 -0
  199. clarifai/datasets/upload/__pycache__/base.cpython-312.pyc +0 -0
  200. clarifai/datasets/upload/__pycache__/features.cpython-312.pyc +0 -0
  201. clarifai/datasets/upload/__pycache__/image.cpython-312.pyc +0 -0
  202. clarifai/datasets/upload/__pycache__/multimodal.cpython-312.pyc +0 -0
  203. clarifai/datasets/upload/__pycache__/text.cpython-312.pyc +0 -0
  204. clarifai/datasets/upload/__pycache__/utils.cpython-312.pyc +0 -0
  205. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-312.pyc +0 -0
  206. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-312.pyc +0 -0
  207. clarifai/modules/__pycache__/__init__.cpython-312.pyc +0 -0
  208. clarifai/modules/__pycache__/css.cpython-312.pyc +0 -0
  209. clarifai/runners/__pycache__/__init__.cpython-312.pyc +0 -0
  210. clarifai/runners/__pycache__/server.cpython-312.pyc +0 -0
  211. clarifai/runners/models/__pycache__/__init__.cpython-312.pyc +0 -0
  212. clarifai/runners/models/__pycache__/base_typed_model.cpython-312.pyc +0 -0
  213. clarifai/runners/models/__pycache__/model_builder.cpython-312.pyc +0 -0
  214. clarifai/runners/models/__pycache__/model_class.cpython-312.pyc +0 -0
  215. clarifai/runners/models/__pycache__/model_run_locally.cpython-312.pyc +0 -0
  216. clarifai/runners/models/__pycache__/model_runner.cpython-312.pyc +0 -0
  217. clarifai/runners/models/__pycache__/model_servicer.cpython-312.pyc +0 -0
  218. clarifai/runners/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  219. clarifai/runners/utils/__pycache__/const.cpython-312.pyc +0 -0
  220. clarifai/runners/utils/__pycache__/data_handler.cpython-312.pyc +0 -0
  221. clarifai/runners/utils/__pycache__/data_types.cpython-312.pyc +0 -0
  222. clarifai/runners/utils/__pycache__/data_utils.cpython-312.pyc +0 -0
  223. clarifai/runners/utils/__pycache__/loader.cpython-312.pyc +0 -0
  224. clarifai/runners/utils/__pycache__/method_signatures.cpython-312.pyc +0 -0
  225. clarifai/runners/utils/__pycache__/serializers.cpython-312.pyc +0 -0
  226. clarifai/runners/utils/__pycache__/url_fetcher.cpython-312.pyc +0 -0
  227. clarifai/schema/__pycache__/search.cpython-312.pyc +0 -0
  228. clarifai/urls/__pycache__/helper.cpython-312.pyc +0 -0
  229. clarifai/utils/__pycache__/__init__.cpython-312.pyc +0 -0
  230. clarifai/utils/__pycache__/cli.cpython-312.pyc +0 -0
  231. clarifai/utils/__pycache__/config.cpython-312.pyc +0 -0
  232. clarifai/utils/__pycache__/constants.cpython-312.pyc +0 -0
  233. clarifai/utils/__pycache__/logging.cpython-312.pyc +0 -0
  234. clarifai/utils/__pycache__/misc.cpython-312.pyc +0 -0
  235. clarifai/utils/__pycache__/model_train.cpython-312.pyc +0 -0
  236. clarifai/utils/config.py +0 -105
  237. clarifai/utils/config.py~ +0 -145
  238. clarifai/utils/evaluation/__pycache__/__init__.cpython-312.pyc +0 -0
  239. clarifai/utils/evaluation/__pycache__/helpers.cpython-312.pyc +0 -0
  240. clarifai/utils/evaluation/__pycache__/main.cpython-312.pyc +0 -0
  241. clarifai/workflows/__pycache__/__init__.cpython-312.pyc +0 -0
  242. clarifai/workflows/__pycache__/export.cpython-312.pyc +0 -0
  243. clarifai/workflows/__pycache__/utils.cpython-312.pyc +0 -0
  244. clarifai/workflows/__pycache__/validate.cpython-312.pyc +0 -0
  245. clarifai-11.2.3rc1.dist-info/RECORD +0 -185
  246. {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc2.dist-info}/LICENSE +0 -0
  247. {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc2.dist-info}/entry_points.txt +0 -0
  248. {clarifai-11.2.3rc1.dist-info → clarifai-11.2.3rc2.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,502 @@
1
+ import inspect
2
+ import time
3
+ from typing import Any, Dict, Iterator, List
4
+
5
+ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
6
+ from clarifai_grpc.grpc.api.status import status_code_pb2
7
+
8
+ from clarifai.constants.model import MAX_MODEL_PREDICT_INPUTS
9
+ from clarifai.errors import UserError
10
+ from clarifai.runners.utils import code_script, method_signatures
11
+ from clarifai.runners.utils.data_utils import is_openai_chat_format
12
+ from clarifai.runners.utils.method_signatures import (CompatibilitySerializer, deserialize,
13
+ get_stream_from_signature, serialize,
14
+ signatures_from_json)
15
+ from clarifai.utils.logging import logger
16
+ from clarifai.utils.misc import BackoffIterator, status_is_retryable
17
+
18
+
19
+ class ModelClient:
20
+ '''
21
+ Client for calling model predict, generate, and stream methods.
22
+ '''
23
+
24
+ def __init__(self, stub, request_template: service_pb2.PostModelOutputsRequest = None):
25
+ '''
26
+ Initialize the model client.
27
+
28
+ Args:
29
+ stub: The gRPC stub for the model.
30
+ request_template: The template for the request to send to the model, including
31
+ common fields like model_id, model_version, cluster, etc.
32
+ '''
33
+ self.STUB = stub
34
+ self.request_template = request_template or service_pb2.PostModelOutputsRequest()
35
+ self._method_signatures = None
36
+ self._defined = False
37
+
38
+ def fetch(self):
39
+ '''
40
+ Fetch function signature definitions from the model and define the functions in the client
41
+ '''
42
+ if self._defined:
43
+ return
44
+ try:
45
+ self._fetch_signatures()
46
+ self._define_functions()
47
+ finally:
48
+ self._defined = True
49
+
50
+ def __getattr__(self, name):
51
+ if not self._defined:
52
+ self.fetch()
53
+ return self.__getattribute__(name)
54
+
55
+ def _fetch_signatures(self):
56
+ '''
57
+ Fetch the method signatures from the model.
58
+
59
+ Returns:
60
+ Dict: The method signatures.
61
+ '''
62
+ #request = resources_pb2.GetModelSignaturesRequest()
63
+ #response = self.stub.GetModelSignatures(request)
64
+ #self._method_signatures = json.loads(response.signatures) # or define protos
65
+ # TODO this could use a new endpoint to get the signatures
66
+ # for local grpc models, we'll also have to add the endpoint to the model servicer
67
+ # for now we'll just use the predict endpoint with a special method name
68
+
69
+ request = service_pb2.PostModelOutputsRequest()
70
+ request.CopyFrom(self.request_template)
71
+ # request.model.model_version.output_info.params['_method_name'] = '_GET_SIGNATURES'
72
+ inp = request.inputs.add() # empty input for this method
73
+ inp.data.parts.add() # empty part for this input
74
+ inp.data.metadata['_method_name'] = '_GET_SIGNATURES'
75
+ start_time = time.time()
76
+ backoff_iterator = BackoffIterator(10)
77
+ while True:
78
+ response = self.STUB.PostModelOutputs(request)
79
+ if status_is_retryable(
80
+ response.status.code) and time.time() - start_time < 60 * 10: # 10 minutes
81
+ logger.info(f"Retrying model info fetch with response {response.status!r}")
82
+ time.sleep(next(backoff_iterator))
83
+ continue
84
+ break
85
+ if (response.status.code == status_code_pb2.INPUT_UNSUPPORTED_FORMAT or
86
+ (response.status.code == status_code_pb2.SUCCESS and
87
+ response.outputs[0].data.text.raw == '')):
88
+ # return codes/values from older models that don't support _GET_SIGNATURES
89
+ self._method_signatures = {}
90
+ self._define_compatability_functions()
91
+ return
92
+ if response.status.code != status_code_pb2.SUCCESS:
93
+ raise Exception(f"Model failed with response {response!r}")
94
+ self._method_signatures = signatures_from_json(response.outputs[0].data.text.raw)
95
+
96
+ def _define_functions(self):
97
+ '''
98
+ Define the functions based on the method signatures.
99
+ '''
100
+ for method_name, method_signature in self._method_signatures.items():
101
+ # define the function in this client instance
102
+ if resources_pb2.RunnerMethodType.Name(method_signature.method_type) == 'UNARY_UNARY':
103
+ call_func = self._predict
104
+ elif resources_pb2.RunnerMethodType.Name(method_signature.method_type) == 'UNARY_STREAMING':
105
+ call_func = self._generate
106
+ elif resources_pb2.RunnerMethodType.Name(
107
+ method_signature.method_type) == 'STREAMING_STREAMING':
108
+ call_func = self._stream
109
+ else:
110
+ raise ValueError(f"Unknown method type {method_signature.method_type}")
111
+
112
+ # method argnames, in order, collapsing nested keys to corresponding user function args
113
+ method_argnames = []
114
+ for var in method_signature.input_fields:
115
+ outer = var.name.split('.', 1)[0]
116
+ if outer in method_argnames:
117
+ continue
118
+ method_argnames.append(outer)
119
+
120
+ def bind_f(method_name, method_argnames, call_func):
121
+
122
+ def f(*args, **kwargs):
123
+ if len(args) > len(method_argnames):
124
+ raise TypeError(
125
+ f"{method_name}() takes {len(method_argnames)} positional arguments but {len(args)} were given"
126
+ )
127
+
128
+ if len(args) + len(kwargs) > len(method_argnames):
129
+ raise TypeError(
130
+ f"{method_name}() got an unexpected keyword argument {next(iter(kwargs))}")
131
+ if len(args) == 1 and (not kwargs) and isinstance(args[0], list):
132
+ batch_inputs = args[0]
133
+ # Validate each input is a dictionary
134
+ is_batch_input_valid = all(isinstance(input, dict) for input in batch_inputs)
135
+ if is_batch_input_valid and (not is_openai_chat_format(batch_inputs)):
136
+ # If the batch input is valid, call the function with the batch inputs and the method name
137
+ return call_func(batch_inputs, method_name)
138
+
139
+ for name, arg in zip(method_argnames, args): # handle positional with zip shortest
140
+ if name in kwargs:
141
+ raise TypeError(f"Multiple values for argument {name}")
142
+ kwargs[name] = arg
143
+ return call_func(kwargs, method_name)
144
+
145
+ return f
146
+
147
+ # need to bind method_name to the value, not the mutating loop variable
148
+ f = bind_f(method_name, method_argnames, call_func)
149
+
150
+ # set names, annotations and docstrings
151
+ f.__name__ = method_name
152
+ f.__qualname__ = f'{self.__class__.__name__}.{method_name}'
153
+ input_annotations = code_script._get_annotations_source(method_signature)
154
+ return_annotation = input_annotations.pop('return', (None, None))[0]
155
+ sig = inspect.signature(f).replace(
156
+ parameters=[
157
+ inspect.Parameter(k, inspect.Parameter.POSITIONAL_OR_KEYWORD, annotation=v[0])
158
+ for k, v in input_annotations.items()
159
+ ],
160
+ return_annotation=return_annotation,
161
+ )
162
+ f.__signature__ = sig
163
+ f.__doc__ = method_signature.description
164
+ setattr(self, method_name, f)
165
+
166
+ def available_methods(self) -> List[str]:
167
+ """Get the available methods for this model.
168
+
169
+ Returns:
170
+ List[str]: The available methods.
171
+ """
172
+ if not self._defined:
173
+ self.fetch()
174
+ return self._method_signatures.keys()
175
+
176
+ def method_signature(self, method_name: str) -> str:
177
+ """Get the method signature for a method.
178
+
179
+ Args:
180
+ method_name (str): The name of the method.
181
+
182
+ Returns:
183
+ str: The method signature.
184
+ """
185
+ if not self._defined:
186
+ self.fetch()
187
+ return method_signatures.get_method_signature(self._method_signatures[method_name])
188
+
189
+ def generate_client_script(self) -> str:
190
+ """Generate a client script for this model.
191
+
192
+ Returns:
193
+ str: The client script.
194
+ """
195
+ if not self._defined:
196
+ self.fetch()
197
+ method_signatures = []
198
+ for _, method_signature in self._method_signatures.items():
199
+ method_signatures.append(method_signature)
200
+ return code_script.generate_client_script(
201
+ method_signatures,
202
+ user_id=self.request_template.user_app_id.user_id,
203
+ app_id=self.request_template.user_app_id.app_id,
204
+ model_id=self.request_template.model_id)
205
+
206
+ def _define_compatability_functions(self):
207
+
208
+ serializer = CompatibilitySerializer()
209
+
210
+ def predict(input: Any) -> Any:
211
+ proto = resources_pb2.Input()
212
+ serializer.serialize(proto.data, input)
213
+ # always use text.raw for compat
214
+ if proto.data.string_value:
215
+ proto.data.text.raw = proto.data.string_value
216
+ proto.data.string_value = ''
217
+ response = self._predict_by_proto([proto])
218
+ if response.status.code != status_code_pb2.SUCCESS:
219
+ raise Exception(f"Model predict failed with response {response!r}")
220
+ response_data = response.outputs[0].data
221
+ if response_data.text.raw:
222
+ response_data.string_value = response_data.text.raw
223
+ response_data.text.raw = ''
224
+ return serializer.deserialize(response_data)
225
+
226
+ self.predict = predict
227
+
228
+ def _predict(
229
+ self,
230
+ inputs, # TODO set up functions according to fetched signatures?
231
+ method_name: str = 'predict',
232
+ ) -> Any:
233
+
234
+ input_signature = self._method_signatures[method_name].input_fields
235
+ output_signature = self._method_signatures[method_name].output_fields
236
+
237
+ batch_input = True
238
+ if isinstance(inputs, dict):
239
+ inputs = [inputs]
240
+ batch_input = False
241
+
242
+ proto_inputs = []
243
+ for input in inputs:
244
+ proto = resources_pb2.Input()
245
+
246
+ serialize(input, input_signature, proto.data)
247
+ proto_inputs.append(proto)
248
+
249
+ response = self._predict_by_proto(proto_inputs, method_name)
250
+
251
+ outputs = []
252
+ for output in response.outputs:
253
+ outputs.append(deserialize(output.data, output_signature, is_output=True))
254
+ if batch_input:
255
+ return outputs
256
+ return outputs[0]
257
+
258
+ def _predict_by_proto(
259
+ self,
260
+ inputs: List[resources_pb2.Input],
261
+ method_name: str = None,
262
+ inference_params: Dict = None,
263
+ output_config: Dict = None,
264
+ ) -> service_pb2.MultiOutputResponse:
265
+ """Predicts the model based on the given inputs.
266
+
267
+ Args:
268
+ inputs (List[resources_pb2.Input]): The inputs to predict.
269
+ method_name (str): The remote method name to call.
270
+ inference_params (Dict): Inference parameters to override.
271
+ output_config (Dict): Output configuration to override.
272
+
273
+ Returns:
274
+ service_pb2.MultiOutputResponse: The prediction response(s).
275
+ """
276
+ if not isinstance(inputs, list):
277
+ raise UserError('Invalid inputs, inputs must be a list of Input objects.')
278
+ if len(inputs) > MAX_MODEL_PREDICT_INPUTS:
279
+ raise UserError(f"Too many inputs. Max is {MAX_MODEL_PREDICT_INPUTS}.")
280
+
281
+ request = service_pb2.PostModelOutputsRequest()
282
+ request.CopyFrom(self.request_template)
283
+
284
+ request.inputs.extend(inputs)
285
+
286
+ if method_name:
287
+ # TODO put in new proto field?
288
+ for inp in request.inputs:
289
+ inp.data.metadata['_method_name'] = method_name
290
+ if inference_params:
291
+ request.model.model_version.output_info.params.update(inference_params)
292
+ if output_config:
293
+ request.model.model_version.output_info.output_config.MergeFrom(
294
+ resources_pb2.OutputConfig(**output_config))
295
+
296
+ start_time = time.time()
297
+ backoff_iterator = BackoffIterator(10)
298
+ while True:
299
+ response = self.STUB.PostModelOutputs(request)
300
+ if status_is_retryable(
301
+ response.status.code) and time.time() - start_time < 60 * 10: # 10 minutes
302
+ logger.info(f"Model predict failed with response {response!r}")
303
+ time.sleep(next(backoff_iterator))
304
+ continue
305
+
306
+ if response.status.code != status_code_pb2.SUCCESS:
307
+ raise Exception(f"Model predict failed with response {response!r}")
308
+ break
309
+ return response
310
+
311
+ def _generate(
312
+ self,
313
+ inputs, # TODO set up functions according to fetched signatures?
314
+ method_name: str = 'generate',
315
+ ) -> Any:
316
+ input_signature = self._method_signatures[method_name].input_fields
317
+ output_signature = self._method_signatures[method_name].output_fields
318
+
319
+ batch_input = True
320
+ if isinstance(inputs, dict):
321
+ inputs = [inputs]
322
+ batch_input = False
323
+
324
+ proto_inputs = []
325
+ for input in inputs:
326
+ proto = resources_pb2.Input()
327
+ serialize(input, input_signature, proto.data)
328
+ proto_inputs.append(proto)
329
+
330
+ response_stream = self._generate_by_proto(proto_inputs, method_name)
331
+
332
+ for response in response_stream:
333
+ outputs = []
334
+ for output in response.outputs:
335
+ outputs.append(deserialize(output.data, output_signature, is_output=True))
336
+ if batch_input:
337
+ yield outputs
338
+ else:
339
+ yield outputs[0]
340
+
341
+ def _generate_by_proto(
342
+ self,
343
+ inputs: List[resources_pb2.Input],
344
+ method_name: str = None,
345
+ inference_params: Dict = {},
346
+ output_config: Dict = {},
347
+ ):
348
+ """Generate the stream output on model based on the given inputs.
349
+
350
+ Args:
351
+ inputs (list[Input]): The inputs to generate, must be less than 128.
352
+ method_name (str): The remote method name to call.
353
+ inference_params (dict): The inference params to override.
354
+ output_config (dict): The output config to override.
355
+ """
356
+ if not isinstance(inputs, list):
357
+ raise UserError('Invalid inputs, inputs must be a list of Input objects.')
358
+ if len(inputs) > MAX_MODEL_PREDICT_INPUTS:
359
+ raise UserError(f"Too many inputs. Max is {MAX_MODEL_PREDICT_INPUTS}."
360
+ ) # TODO Use Chunker for inputs len > 128
361
+
362
+ request = service_pb2.PostModelOutputsRequest()
363
+ request.CopyFrom(self.request_template)
364
+
365
+ request.inputs.extend(inputs)
366
+
367
+ if method_name:
368
+ # TODO put in new proto field?
369
+ for inp in request.inputs:
370
+ inp.data.metadata['_method_name'] = method_name
371
+ if inference_params:
372
+ request.model.model_version.output_info.params.update(inference_params)
373
+ if output_config:
374
+ request.model.model_version.output_info.output_config.MergeFromDict(output_config)
375
+
376
+ start_time = time.time()
377
+ backoff_iterator = BackoffIterator(10)
378
+ started = False
379
+ while not started:
380
+ stream_response = self.STUB.GenerateModelOutputs(request)
381
+ try:
382
+ response = next(stream_response) # get the first response
383
+ except StopIteration:
384
+ raise Exception("Model Generate failed with no response")
385
+ if status_is_retryable(response.status.code) and \
386
+ time.time() - start_time < 60 * 10:
387
+ logger.info("Model is still deploying, please wait...")
388
+ time.sleep(next(backoff_iterator))
389
+ continue
390
+ if response.status.code != status_code_pb2.SUCCESS:
391
+ raise Exception(f"Model Generate failed with response {response.status!r}")
392
+ started = True
393
+
394
+ yield response # yield the first response
395
+
396
+ for response in stream_response:
397
+ if response.status.code != status_code_pb2.SUCCESS:
398
+ raise Exception(f"Model Generate failed with response {response.status!r}")
399
+ yield response
400
+
401
+ def _stream(
402
+ self,
403
+ inputs,
404
+ method_name: str = 'stream',
405
+ ) -> Any:
406
+ input_signature = self._method_signatures[method_name].input_fields
407
+ output_signature = self._method_signatures[method_name].output_fields
408
+
409
+ if isinstance(inputs, list):
410
+ assert len(inputs) == 1, 'streaming methods do not support batched calls'
411
+ inputs = inputs[0]
412
+ assert isinstance(inputs, dict)
413
+ kwargs = inputs
414
+
415
+ # find the streaming vars in the input signature, and the streaming input python param
416
+ stream_sig = get_stream_from_signature(input_signature)
417
+ if stream_sig is None:
418
+ raise ValueError("Streaming method must have a Stream input")
419
+ stream_argname = stream_sig.name
420
+
421
+ # get the streaming input generator from the user-provided function arg values
422
+ user_inputs_generator = kwargs.pop(stream_argname)
423
+
424
+ def _input_proto_stream():
425
+ # first item contains all the inputs and the first stream item
426
+ proto = resources_pb2.Input()
427
+ try:
428
+ item = next(user_inputs_generator)
429
+ except StopIteration:
430
+ return # no items to stream
431
+ kwargs[stream_argname] = item
432
+ serialize(kwargs, input_signature, proto.data)
433
+
434
+ yield proto
435
+
436
+ # subsequent items are just the stream items
437
+ for item in user_inputs_generator:
438
+ proto = resources_pb2.Input()
439
+ serialize({stream_argname: item}, [stream_sig], proto.data)
440
+ yield proto
441
+
442
+ response_stream = self._stream_by_proto(_input_proto_stream(), method_name)
443
+
444
+ for response in response_stream:
445
+ assert len(response.outputs) == 1, 'streaming methods must have exactly one output'
446
+ yield deserialize(response.outputs[0].data, output_signature, is_output=True)
447
+
448
+ def _req_iterator(self,
449
+ input_iterator: Iterator[List[resources_pb2.Input]],
450
+ method_name: str = None,
451
+ inference_params: Dict = {},
452
+ output_config: Dict = {}):
453
+ request = service_pb2.PostModelOutputsRequest()
454
+ request.CopyFrom(self.request_template)
455
+ if inference_params:
456
+ request.model.model_version.output_info.params.update(inference_params)
457
+ if output_config:
458
+ request.model.model_version.output_info.output_config.MergeFromDict(output_config)
459
+ for inputs in input_iterator:
460
+ req = service_pb2.PostModelOutputsRequest()
461
+ req.CopyFrom(request)
462
+ if isinstance(inputs, list):
463
+ req.inputs.extend(inputs)
464
+ else:
465
+ req.inputs.append(inputs)
466
+ # TODO: put into new proto field?
467
+ if method_name:
468
+ for inp in req.inputs:
469
+ inp.data.metadata['_method_name'] = method_name
470
+ yield req
471
+
472
+ def _stream_by_proto(self,
473
+ inputs: Iterator[List[resources_pb2.Input]],
474
+ method_name: str = None,
475
+ inference_params: Dict = {},
476
+ output_config: Dict = {}):
477
+ """Generate the stream output on model based on the given stream of inputs.
478
+ """
479
+ # if not isinstance(inputs, Iterator[List[Input]]):
480
+ # raise UserError('Invalid inputs, inputs must be a iterator of list of Input objects.')
481
+
482
+ request = self._req_iterator(inputs, method_name, inference_params, output_config)
483
+
484
+ start_time = time.time()
485
+ backoff_iterator = BackoffIterator(10)
486
+ generation_started = False
487
+ while True:
488
+ if generation_started:
489
+ break
490
+ stream_response = self.STUB.StreamModelOutputs(request)
491
+ for response in stream_response:
492
+ if status_is_retryable(response.status.code) and \
493
+ time.time() - start_time < 60 * 10:
494
+ logger.info("Model is still deploying, please wait...")
495
+ time.sleep(next(backoff_iterator))
496
+ break
497
+ if response.status.code != status_code_pb2.SUCCESS:
498
+ raise Exception(f"Model Predict failed with response {response.status!r}")
499
+ else:
500
+ if not generation_started:
501
+ generation_started = True
502
+ yield response
@@ -1,14 +1,9 @@
1
- from .models.base_typed_model import AnyAnyModel, TextInputModel, VisualInputModel
2
1
  from .models.model_builder import ModelBuilder
2
+ from .models.model_class import ModelClass
3
3
  from .models.model_runner import ModelRunner
4
- from .utils.data_handler import InputDataHandler, OutputDataHandler
5
4
 
6
5
  __all__ = [
7
6
  "ModelRunner",
8
7
  "ModelBuilder",
9
- "InputDataHandler",
10
- "OutputDataHandler",
11
- "AnyAnyModel",
12
- "TextInputModel",
13
- "VisualInputModel",
8
+ "ModelClass",
14
9
  ]
@@ -0,0 +1,31 @@
1
+ ARG BASE_IMAGE=${BASE_IMAGE}
2
+ FROM ${BASE_IMAGE} as build
3
+
4
+ # Set the working directory to /app
5
+ WORKDIR /app
6
+
7
+ COPY requirements.txt .
8
+ # Install requirements and cleanup before leaving this line.
9
+ # Note(zeiler): this could be in a future template as {{model_python_deps}}
10
+ RUN pip install --no-cache-dir -r requirements.txt
11
+
12
+ # Install Clarifai SDK
13
+ RUN pip install --no-cache-dir clarifai
14
+
15
+ # These will be set by the templating system.
16
+ ENV CLARIFAI_PAT=${CLARIFAI_PAT}
17
+ ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
18
+ ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
19
+ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
20
+ ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
21
+ ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
22
+
23
+ # Copy the current folder into /app/model_dir that the SDK will expect.
24
+ COPY . /app/model_dir/${name}
25
+
26
+ # Add the model directory to the python path.
27
+ ENV PYTHONPATH "${PYTHONPATH}:/app/model_dir/${name}"
28
+
29
+ # Finally run the clarifai entrypoint to start the runner loop and local dev server.
30
+ # Note(zeiler): we may want to make this a clarifai CLI call.
31
+ CMD ["-m", "clarifai.runners.server", "--model_path", "/app/model_dir/${name}"]
@@ -0,0 +1,42 @@
1
+ ARG TARGET_PLATFORM=linux/amd64
2
+ FROM --platform=$TARGET_PLATFORM ${BASE_IMAGE} as build
3
+
4
+ ENV DEBIAN_FRONTEND=noninteractive
5
+
6
+ #############################
7
+ # User specific requirements
8
+ #############################
9
+ COPY requirements.txt .
10
+
11
+ # Install requirements and cleanup before leaving this line.
12
+ # Note(zeiler): this could be in a future template as {{model_python_deps}}
13
+ RUN pip install --no-cache-dir -r requirements.txt
14
+
15
+ # Install Clarifai SDK
16
+ RUN pip install --no-cache-dir clarifai
17
+
18
+ # These will be set by the templating system.
19
+ ENV CLARIFAI_PAT=${CLARIFAI_PAT}
20
+ ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
21
+ ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
22
+ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
23
+ ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
24
+ ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
25
+
26
+ # Set the NUMBA cache dir to /tmp
27
+ ENV NUMBA_CACHE_DIR=/tmp/numba_cache
28
+ ENV HOME=/tmp
29
+
30
+ # Set the working directory to /app
31
+ WORKDIR /app
32
+
33
+ # Copy the current folder into /app/model_dir that the SDK will expect.
34
+ # Note(zeiler): would be nice to exclude checkpoints in case they were pre-downloaded.
35
+ COPY . /app/model_dir/${name}
36
+
37
+ # Add the model directory to the python path.
38
+ ENV PYTHONPATH=${PYTHONPATH}:/app/model_dir/${name}
39
+
40
+ # Finally run the clarifai entrypoint to start the runner loop and local dev server.
41
+ # Note(zeiler): we may want to make this a clarifai CLI call.
42
+ CMD ["-m", "clarifai.runners.server", "--model_path", "/app/model_dir/${name}"]