clarifai 11.3.0rc2__py3-none-any.whl → 11.4.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (300)
  1. clarifai/__init__.py +1 -1
  2. clarifai/cli/__main__.py +1 -1
  3. clarifai/cli/base.py +144 -136
  4. clarifai/cli/compute_cluster.py +45 -31
  5. clarifai/cli/deployment.py +93 -76
  6. clarifai/cli/model.py +578 -180
  7. clarifai/cli/nodepool.py +100 -82
  8. clarifai/client/__init__.py +12 -2
  9. clarifai/client/app.py +973 -911
  10. clarifai/client/auth/helper.py +345 -342
  11. clarifai/client/auth/register.py +7 -7
  12. clarifai/client/auth/stub.py +107 -106
  13. clarifai/client/base.py +185 -178
  14. clarifai/client/compute_cluster.py +214 -180
  15. clarifai/client/dataset.py +793 -698
  16. clarifai/client/deployment.py +55 -50
  17. clarifai/client/input.py +1223 -1088
  18. clarifai/client/lister.py +47 -45
  19. clarifai/client/model.py +1939 -1717
  20. clarifai/client/model_client.py +525 -502
  21. clarifai/client/module.py +82 -73
  22. clarifai/client/nodepool.py +358 -213
  23. clarifai/client/runner.py +58 -0
  24. clarifai/client/search.py +342 -309
  25. clarifai/client/user.py +419 -414
  26. clarifai/client/workflow.py +294 -274
  27. clarifai/constants/dataset.py +11 -17
  28. clarifai/constants/model.py +8 -2
  29. clarifai/datasets/export/inputs_annotations.py +233 -217
  30. clarifai/datasets/upload/base.py +63 -51
  31. clarifai/datasets/upload/features.py +43 -38
  32. clarifai/datasets/upload/image.py +237 -207
  33. clarifai/datasets/upload/loaders/coco_captions.py +34 -32
  34. clarifai/datasets/upload/loaders/coco_detection.py +72 -65
  35. clarifai/datasets/upload/loaders/imagenet_classification.py +57 -53
  36. clarifai/datasets/upload/loaders/xview_detection.py +274 -132
  37. clarifai/datasets/upload/multimodal.py +55 -46
  38. clarifai/datasets/upload/text.py +55 -47
  39. clarifai/datasets/upload/utils.py +250 -234
  40. clarifai/errors.py +51 -50
  41. clarifai/models/api.py +260 -238
  42. clarifai/modules/css.py +50 -50
  43. clarifai/modules/pages.py +33 -33
  44. clarifai/rag/rag.py +312 -288
  45. clarifai/rag/utils.py +91 -84
  46. clarifai/runners/models/model_builder.py +906 -802
  47. clarifai/runners/models/model_class.py +370 -331
  48. clarifai/runners/models/model_run_locally.py +459 -419
  49. clarifai/runners/models/model_runner.py +170 -162
  50. clarifai/runners/models/model_servicer.py +78 -70
  51. clarifai/runners/server.py +111 -101
  52. clarifai/runners/utils/code_script.py +225 -187
  53. clarifai/runners/utils/const.py +4 -1
  54. clarifai/runners/utils/data_types/__init__.py +12 -0
  55. clarifai/runners/utils/data_types/data_types.py +598 -0
  56. clarifai/runners/utils/data_utils.py +387 -440
  57. clarifai/runners/utils/loader.py +247 -227
  58. clarifai/runners/utils/method_signatures.py +411 -386
  59. clarifai/runners/utils/openai_convertor.py +108 -109
  60. clarifai/runners/utils/serializers.py +175 -179
  61. clarifai/runners/utils/url_fetcher.py +35 -35
  62. clarifai/schema/search.py +56 -63
  63. clarifai/urls/helper.py +125 -102
  64. clarifai/utils/cli.py +129 -123
  65. clarifai/utils/config.py +127 -87
  66. clarifai/utils/constants.py +49 -0
  67. clarifai/utils/evaluation/helpers.py +503 -466
  68. clarifai/utils/evaluation/main.py +431 -393
  69. clarifai/utils/evaluation/testset_annotation_parser.py +154 -144
  70. clarifai/utils/logging.py +324 -306
  71. clarifai/utils/misc.py +60 -56
  72. clarifai/utils/model_train.py +165 -146
  73. clarifai/utils/protobuf.py +126 -103
  74. clarifai/versions.py +3 -1
  75. clarifai/workflows/export.py +48 -50
  76. clarifai/workflows/utils.py +39 -36
  77. clarifai/workflows/validate.py +55 -43
  78. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/METADATA +16 -6
  79. clarifai-11.4.0.dist-info/RECORD +109 -0
  80. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/WHEEL +1 -1
  81. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  82. clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
  83. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  84. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  85. clarifai/__pycache__/errors.cpython-311.pyc +0 -0
  86. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  87. clarifai/__pycache__/versions.cpython-311.pyc +0 -0
  88. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  89. clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  90. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  91. clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
  92. clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  93. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  94. clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  95. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  96. clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
  97. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  98. clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
  99. clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  100. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  101. clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
  102. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  103. clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
  104. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  105. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  106. clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
  107. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  108. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  109. clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
  110. clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  111. clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  112. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  113. clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
  114. clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
  115. clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
  116. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  117. clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
  118. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  119. clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
  120. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  121. clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
  122. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  123. clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
  124. clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
  125. clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
  126. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  127. clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
  128. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  129. clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
  130. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  131. clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
  132. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  133. clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
  134. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  135. clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
  136. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  137. clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
  138. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  139. clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
  140. clarifai/client/cli/__init__.py +0 -0
  141. clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  142. clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  143. clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  144. clarifai/client/cli/base_cli.py +0 -88
  145. clarifai/client/cli/model_cli.py +0 -29
  146. clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
  147. clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
  148. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  149. clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
  150. clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
  151. clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
  152. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  153. clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
  154. clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
  155. clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
  156. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  157. clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
  158. clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
  159. clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
  160. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  161. clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
  162. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  163. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  164. clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
  165. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  166. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  167. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
  168. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  169. clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
  170. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  171. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  172. clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
  173. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  174. clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
  175. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  176. clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
  177. clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
  178. clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
  179. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  180. clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
  181. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  182. clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
  183. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
  184. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
  185. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
  186. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
  187. clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
  188. clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
  189. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  190. clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
  191. clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
  192. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  193. clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
  194. clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
  195. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  196. clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
  197. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  198. clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
  199. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  200. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
  201. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
  202. clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
  203. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  204. clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
  205. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  206. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  207. clarifai/runners/models/__pycache__/base_typed_model.cpython-311.pyc +0 -0
  208. clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
  209. clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
  210. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  211. clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
  212. clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
  213. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  214. clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
  215. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  216. clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
  217. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  218. clarifai/runners/models/base_typed_model.py +0 -238
  219. clarifai/runners/models/model_class_refract.py +0 -80
  220. clarifai/runners/models/model_upload.py +0 -607
  221. clarifai/runners/models/temp.py +0 -25
  222. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  223. clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  224. clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
  225. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  226. clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
  227. clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
  228. clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
  229. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  230. clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
  231. clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
  232. clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
  233. clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
  234. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  235. clarifai/runners/utils/__pycache__/data_handler.cpython-311.pyc +0 -0
  236. clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
  237. clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
  238. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  239. clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
  240. clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
  241. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  242. clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
  243. clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
  244. clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
  245. clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
  246. clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
  247. clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
  248. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  249. clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
  250. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  251. clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
  252. clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
  253. clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
  254. clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
  255. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  256. clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
  257. clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
  258. clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
  259. clarifai/runners/utils/data_handler.py +0 -231
  260. clarifai/runners/utils/data_handler_refract.py +0 -213
  261. clarifai/runners/utils/data_types.py +0 -469
  262. clarifai/runners/utils/logger.py +0 -0
  263. clarifai/runners/utils/openai_format.py +0 -87
  264. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  265. clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
  266. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  267. clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
  268. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  269. clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  270. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  271. clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
  272. clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
  273. clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
  274. clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
  275. clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
  276. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  277. clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
  278. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  279. clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
  280. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  281. clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
  282. clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
  283. clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
  284. clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
  285. clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
  286. clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
  287. clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
  288. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  289. clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
  290. clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
  291. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  292. clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
  293. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  294. clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
  295. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  296. clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
  297. clarifai-11.3.0rc2.dist-info/RECORD +0 -322
  298. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/entry_points.txt +0 -0
  299. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info/licenses}/LICENSE +0 -0
  300. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/top_level.txt +0 -0
@@ -1,30 +1,28 @@
-import operator
 from io import BytesIO
 from typing import List
 
 from clarifai_grpc.grpc.api import resources_pb2
-from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeEnumOption
+from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeEnumOption, ModelTypeRangeInfo
 from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeField as InputFieldProto
-from clarifai_grpc.grpc.api.resources_pb2 import ModelTypeRangeInfo
 from PIL import Image
 
 from clarifai.runners.utils.data_types import MessageData
 
 
 def image_to_bytes(img: Image.Image, format="JPEG") -> bytes:
-  buffered = BytesIO()
-  img.save(buffered, format=format)
-  img_str = buffered.getvalue()
-  return img_str
+    buffered = BytesIO()
+    img.save(buffered, format=format)
+    img_str = buffered.getvalue()
+    return img_str
 
 
 def bytes_to_image(bytes_img) -> Image.Image:
-  img = Image.open(BytesIO(bytes_img))
-  return img
+    img = Image.open(BytesIO(bytes_img))
+    return img
 
 
 def is_openai_chat_format(messages):
-  """
+    """
     Verify if the given argument follows the OpenAI chat messages format.
 
     Args:
@@ -33,447 +31,396 @@ def is_openai_chat_format(messages):
     Returns:
         bool: True if valid, False otherwise.
     """
-  if not isinstance(messages, list):
-    return False
+    if not isinstance(messages, list):
+        return False
 
-  valid_roles = {"system", "user", "assistant", "function"}
+    valid_roles = {"system", "user", "assistant", "function"}
 
-  for msg in messages:
-    if not isinstance(msg, dict):
-      return False
-    if "role" not in msg or "content" not in msg:
-      return False
-    if msg["role"] not in valid_roles:
-      return False
+    for msg in messages:
+        if not isinstance(msg, dict):
+            return False
+        if "role" not in msg or "content" not in msg:
+            return False
+        if msg["role"] not in valid_roles:
+            return False
 
-    content = msg["content"]
+        content = msg["content"]
 
-    # Content should be either a string (text message) or a multimodal list
-    if isinstance(content, str):
-      continue  # Valid text message
+        # Content should be either a string (text message) or a multimodal list
+        if isinstance(content, str):
+            continue  # Valid text message
 
-    elif isinstance(content, list):
-      for item in content:
-        if not isinstance(item, dict):
-          return False
-  return True
+        elif isinstance(content, list):
+            for item in content:
+                if not isinstance(item, dict):
+                    return False
+    return True
 
 
 class InputField(MessageData):
-  """A field that can be used to store input data."""
-
-  def __init__(self,
-               default=None,
-               description=None,
-               min_value=None,
-               max_value=None,
-               choices=None,
-               is_param=True):
-    self.default = default
-    self.description = description
-    self.min_value = min_value
-    self.max_value = max_value
-    self.choices = choices
-    self.is_param = is_param
-
-  def __repr__(self) -> str:
-    attrs = []
-    if self.default is not None:
-      attrs.append(f"default={self.default!r}")
-    if self.description is not None:
-      attrs.append(f"description={self.description!r}")
-    if self.min_value is not None:
-      attrs.append(f"min_value={self.min_value!r}")
-    if self.max_value is not None:
-      attrs.append(f"max_value={self.max_value!r}")
-    if self.choices is not None:
-      attrs.append(f"choices={self.choices!r}")
-    attrs.append(f"is_param={self.is_param!r}")
-    return f"InputField({', '.join(attrs)})"
-
-  # All *explicit* conversions
-  def __int__(self):
-    return int(self.default)
-
-  def __float__(self):
-    return float(self.default)
-
-  def __str__(self):
-    return str(self.default)
-
-  def __bool__(self):
-    return bool(self.default)
-
-  def __index__(self):
-    return int(self.default)  # for slicing
-
-  # sequence / mapping protocol delegation
-  def __len__(self):
-    return len(self.default)
-
-  def __iter__(self):
-    return iter(self.default)
-
-  def __reversed__(self):
-    return reversed(self.default)
-
-  def __contains__(self, item):
-    return item in self.default
-
-  def __getitem__(self, key):
-    return self.default[key]
-
-  def __setitem__(self, k, v):
-    self.default[k] = v
-
-  def __delitem__(self, k):
-    del self.default[k]
-
-  def __hash__(self):
-    return hash(self.default)
-
-  def __call__(self, *args, **kwargs):
-    return self.default(*args, **kwargs)
-
-  # Comparison operators
-  def __eq__(self, other):
-    return self.default == other
-
-  def __lt__(self, other):
-    return self.default < other
-
-  def __le__(self, other):
-    return self.default <= other
-
-  def __gt__(self, other):
-    return self.default > other
-
-  def __ge__(self, other):
-    return self.default >= other
-
-  # Arithmetic operators – # arithmetic & bitwise operators – auto-generated
-  _arith_ops = {
-      "__add__": operator.add,
-      "__sub__": operator.sub,
-      "__mul__": operator.mul,
-      "__truediv__": operator.truediv,
-      "__floordiv__": operator.floordiv,
-      "__mod__": operator.mod,
-      "__pow__": operator.pow,
-      "__and__": operator.and_,
-      "__or__": operator.or_,
-      "__xor__": operator.xor,
-      "__lshift__": operator.lshift,
-      "__rshift__": operator.rshift,
-  }
-
-  # Create both left- and right-hand versions of each operator
-  for _name, _op in _arith_ops.items():
-
-    def _make(op):
-
-      def _f(self, other, *, _op=op):  # default arg binds op
-        return _op(self.default, other)
-
-      return _f
-
-    locals()[_name] = _make(_op)
-    locals()["__r" + _name[2:]] = _make(lambda x, y, _op=_op: _op(y, x))
-  del _name, _op, _make
-
-  # Attribute access delegation – anything we did *not* define above
-  # will automatically be looked up on the wrapped default value.
-  def __getattr__(self, item):
-    return getattr(self.default, item)
-
-  def to_proto(self, proto=None) -> InputFieldProto:
-    if proto is None:
-      proto = InputFieldProto()
-    if self.description is not None:
-      proto.description = self.description
-
-    if self.choices is not None:
-      for choice in self.choices:
-        option = ModelTypeEnumOption(id=str(choice))
-        proto.model_type_enum_options.append(option)
-
-    proto.required = self.default is None
-
-    if self.min_value is not None or self.max_value is not None:
-      range_info = ModelTypeRangeInfo()
-      if self.min_value is not None:
-        range_info.min = float(self.min_value)
-      if self.max_value is not None:
-        range_info.max = float(self.max_value)
-      proto.model_type_range_info.CopyFrom(range_info)
-    proto.is_param = self.is_param
-
-    if self.default is not None:
-      proto = self.set_default(proto, self.default)
+    """A field that can be used to store input data."""
+
+    def __init__(
+        self,
+        default=None,
+        description=None,
+        min_value=None,
+        max_value=None,
+        choices=None,
+        # is_param=True
+    ):
+        self.default = default
+        self.description = description
+        self.min_value = min_value
+        self.max_value = max_value
+        self.choices = choices
+        # self.is_param = is_param
+
+    def __repr__(self) -> str:
+        attrs = []
+        if self.default is not None:
+            attrs.append(f"default={self.default!r}")
+        if self.description is not None:
+            attrs.append(f"description={self.description!r}")
+        if self.min_value is not None:
+            attrs.append(f"min_value={self.min_value!r}")
+        if self.max_value is not None:
+            attrs.append(f"max_value={self.max_value!r}")
+        if self.choices is not None:
+            attrs.append(f"choices={self.choices!r}")
+        # attrs.append(f"is_param={self.is_param!r}")
+        return f"InputField({', '.join(attrs)})"
+
+    def to_proto(self, proto=None) -> InputFieldProto:
+        if proto is None:
+            proto = InputFieldProto()
+        if self.description is not None:
+            proto.description = self.description
+
+        if self.choices is not None:
+            for choice in self.choices:
+                option = ModelTypeEnumOption(id=str(choice))
+                proto.model_type_enum_options.append(option)
+
+        proto.required = self.default is None
+
+        if self.min_value is not None or self.max_value is not None:
+            range_info = ModelTypeRangeInfo()
+            if self.min_value is not None:
+                range_info.min = float(self.min_value)
+            if self.max_value is not None:
+                range_info.max = float(self.max_value)
+            proto.model_type_range_info.CopyFrom(range_info)
+        # proto.is_param = self.is_param
+
+        if self.default is not None:
+            proto = self.set_default(proto, self.default)
+
+        return proto
+
+    @classmethod
+    def from_proto(cls, proto):
+        default = None
+        if proto.HasField('default'):
+            pb_value = proto.default
+            if pb_value.HasField('string_value'):
+                default = pb_value.string_value
+                try:
+                    import json
+
+                    default = json.loads(default)
+                except json.JSONDecodeError:
+                    pass
+            elif pb_value.HasField('number_value'):
+                default = pb_value.number_value
+                if default.is_integer():
+                    default = int(default)
+                else:
+                    default = float(default)
+            elif pb_value.HasField('bool_value'):
+                default = pb_value.bool_value
+
+        choices = (
+            [option.id for option in proto.model_type_enum_options]
+            if proto.model_type_enum_options
+            else None
+        )
+
+        min_value = None
+        max_value = None
+        if proto.HasField('model_type_range_info'):
+            min_value = proto.model_type_range_info.min
+            max_value = proto.model_type_range_info.max
+            if min_value.is_integer():
+                min_value = int(min_value)
+            if max_value.is_integer():
+                max_value = int(max_value)
+
+        return cls(
+            default=default,
+            description=proto.description if proto.description else None,
+            min_value=min_value,
+            max_value=max_value,
+            choices=choices,
+            # is_param=proto.is_param
+        )
+
+    @classmethod
+    def set_default(cls, proto=None, default=None):
+        try:
+            import json
+
+            if proto is None:
+                proto = InputFieldProto()
+            if default is not None:
+                proto.default = json.dumps(default)
+            return proto
+        except Exception:
+            if default is not None:
+                proto.default = str(default)
+            return proto
+        except Exception as e:
+            raise ValueError(
+                f"Error setting default value of type, {type(default)} and value: {default}: {e}"
+            )
+
+    @classmethod
+    def get_default(cls, proto):
+        default_str = proto.default
+        default = None
+        import json
 
-    return proto
-
-  @classmethod
-  def from_proto(cls, proto):
-    default = None
-    if proto.HasField('default'):
-      pb_value = proto.default
-      if pb_value.HasField('string_value'):
-        default = pb_value.string_value
         try:
-          import json
-          default = json.loads(default)
+            # Attempt to parse as JSON first (for complex types)
+            return json.loads(default_str)
         except json.JSONDecodeError:
-          pass
-      elif pb_value.HasField('number_value'):
-        default = pb_value.number_value
-        if default.is_integer():
-          default = int(default)
-        else:
-          default = float(default)
-      elif pb_value.HasField('bool_value'):
-        default = pb_value.bool_value
-
-    choices = [option.id for option in proto.model_type_enum_options
-              ] if proto.model_type_enum_options else None
-
-    min_value = None
-    max_value = None
-    if proto.HasField('model_type_range_info'):
-      min_value = proto.model_type_range_info.min
-      max_value = proto.model_type_range_info.max
-      if min_value.is_integer():
-        min_value = int(min_value)
-      if max_value.is_integer():
-        max_value = int(max_value)
-
-    return cls(
-        default=default,
-        description=proto.description if proto.description else None,
-        min_value=min_value,
-        max_value=max_value,
-        choices=choices,
-        is_param=proto.is_param)
-
-  @classmethod
-  def set_default(cls, proto=None, default=None):
-    try:
-      import json
-      if proto is None:
-        proto = InputFieldProto()
-      if default is not None:
-        proto.default = json.dumps(default)
-      return proto
-    except Exception:
-      if default is not None:
-        proto.default = str(default)
-      return proto
-    except Exception as e:
-      raise ValueError(
-          f"Error setting default value of type, {type(default)} and value: {default}: {e}")
-
-  @classmethod
-  def get_default(cls, proto):
-    default_str = proto.default
-    default = None
-    import json
-    try:
-      # Attempt to parse as JSON first (for complex types)
-      return json.loads(default_str)
-    except json.JSONDecodeError:
-      pass
-    # Check for boolean values stored as "True" or "False"
-    if proto.type == resources_pb2.ModelTypeField.DataType.BOOL:
-      try:
-        default = bool(default_str)
-      except ValueError:
-        pass
-    # Try to parse as integer
-    elif proto.type == resources_pb2.ModelTypeField.DataType.INT:
-      try:
-        default = int(default_str)
-      except ValueError:
-        pass
-
-    # Try to parse as float
-    elif proto.type == resources_pb2.ModelTypeField.DataType.FLOAT:
-      try:
-        default = float(default_str)
-      except ValueError:
-        pass
-    elif proto.type == resources_pb2.ModelTypeField.DataType.STR:
-      default = default_str
-
-    if default is None:
-      # If all parsing fails, return the string value
-      default = default_str
-    return default
+            pass
+        # Check for boolean values stored as "True" or "False"
+        if proto.type == resources_pb2.ModelTypeField.DataType.BOOL:
+            try:
+                default = bool(default_str)
+            except ValueError:
+                pass
+        # Try to parse as integer
+        elif proto.type == resources_pb2.ModelTypeField.DataType.INT:
+            try:
+                default = int(default_str)
+            except ValueError:
+                pass
+
+        # Try to parse as float
+        elif proto.type == resources_pb2.ModelTypeField.DataType.FLOAT:
+            try:
+                default = float(default_str)
+            except ValueError:
+                pass
+        elif proto.type == resources_pb2.ModelTypeField.DataType.STR:
+            default = default_str
+
+        if default is None:
+            # If all parsing fails, return the string value
+            default = default_str
+        return default
 
 
 class DataConverter:
-  """A class that can be used to convert data to and from a specific format."""
-
-  @classmethod
-  def convert_input_data_to_new_format(
-      cls, data: resources_pb2.Data,
-      input_fields: List[resources_pb2.ModelTypeField]) -> resources_pb2.Data:
-    """Convert input data to new format."""
-    new_data = resources_pb2.Data()
-    for field in input_fields:
-      part_data = cls._convert_field(data, field)
-      if cls._is_data_set(part_data):
-        # if the field is set, add it to the new data part
-        part = new_data.parts.add()
-        part.id = field.name
-        part.data.CopyFrom(part_data)
-      else:
-        if field.required:
-          raise ValueError(f"Field {field.name} is required but not set")
-    return new_data
-
-  @classmethod
-  def _convert_field(cls, old_data: resources_pb2.Data,
-                     field: resources_pb2.ModelTypeField) -> resources_pb2.Data:
-    data_type = field.type
-    new_data = resources_pb2.Data()
-    if data_type == resources_pb2.ModelTypeField.DataType.STR:
-      if old_data.HasField('text'):
-        new_data.string_value = old_data.text.raw
-        old_data.ClearField('text')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.IMAGE:
-      if old_data.HasField('image'):
-        new_data.image.CopyFrom(old_data.image)
-        # Clear the old field to avoid duplication
-        old_data.ClearField('image')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.VIDEO:
-      if old_data.HasField('video'):
-        new_data.video.CopyFrom(old_data.video)
-        old_data.ClearField('video')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.BOOL:
-      if old_data.bool_value is not False:
-        new_data.bool_value = old_data.bool_value
-        old_data.bool_value = False
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.INT:
-      if old_data.int_value != 0:
-        new_data.int_value = old_data.int_value
-        old_data.int_value = 0
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.FLOAT:
-      if old_data.float_value != 0.0:
-        new_data.float_value = old_data.float_value
-        old_data.float_value = 0.0
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.BYTES:
-      if old_data.bytes_value != b"":
-        new_data.bytes_value = old_data.bytes_value
-        old_data.bytes_value = b""
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.NDARRAY:
-      if old_data.HasField('ndarray'):
-        new_data.ndarray.CopyFrom(old_data.ndarray)
-        old_data.ClearField('ndarray')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.TEXT:
-      if old_data.HasField('text'):
-        new_data.text.CopyFrom(old_data.text)
-        old_data.ClearField('text')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.AUDIO:
-      if old_data.HasField('audio'):
-        new_data.audio.CopyFrom(old_data.audio)
-        old_data.ClearField('audio')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.CONCEPT:
-      if old_data.concepts:
-        new_data.concepts.extend(old_data.concepts)
-        old_data.ClearField('concepts')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.REGION:
-      if old_data.regions:
-        new_data.regions.extend(old_data.regions)
-        old_data.ClearField('regions')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.FRAME:
-      if old_data.frames:
-        new_data.frames.extend(old_data.frames)
-        old_data.ClearField('frames')
-      return new_data
-    elif data_type == resources_pb2.ModelTypeField.DataType.LIST:
-      if not field.type_args:
-        raise ValueError("LIST type requires type_args")
-      element_field = field.type_args[0]
-      if element_field in (resources_pb2.ModelTypeField.DataType.CONCEPT,
-                           resources_pb2.ModelTypeField.DataType.REGION,
-                           resources_pb2.ModelTypeField.DataType.FRAME):
-        # convert to new format
-        new_data = cls._convert_field(old_data, element_field)
-      return new_data
-    else:
-      return new_data
-      # raise ValueError(f"Unsupported data type: {data_type}")
-
-  @classmethod
-  def is_old_format(cls, data: resources_pb2.Data) -> bool:
-    """Check if the Data proto is in the old format (without parts)."""
-    if len(data.parts) > 0:
-      return False  # New format uses parts
-
-    # Check if any singular field is set
-    singular_fields = [
-        'image', 'video', 'metadata', 'geo', 'text', 'audio', 'ndarray', 'int_value',
-        'float_value', 'bytes_value', 'bool_value', 'string_value'
-    ]
-    for field in singular_fields:
-      if data.HasField(field):
-        return True
-
-    # Check if any repeated field has elements
-    repeated_fields = [
-        'concepts', 'colors', 'clusters', 'embeddings', 'regions', 'frames', 'tracks',
-        'time_segments', 'hits', 'heatmaps'
-    ]
-    for field in repeated_fields:
-      if getattr(data, field):
-        return True
-
-    return False
-
-  @classmethod
-  def convert_output_data_to_old_format(cls, data: resources_pb2.Data) -> resources_pb2.Data:
-    """Convert output data to old format."""
-    old_data = resources_pb2.Data()
-    part_data = data.parts[0].data
-    # Handle text.raw specially (common case for text outputs)
-    old_data = part_data
-    if old_data.string_value:
-      old_data.text.raw = old_data.string_value
-
-    return old_data
-
-  @classmethod
-  def _is_data_set(cls, data_msg):
-    # Singular message fields
-    singular_fields = ["image", "video", "metadata", "geo", "text", "audio", "ndarray"]
-    for field in singular_fields:
-      if data_msg.HasField(field):
-        return True
-
-    # Repeated fields
-    repeated_fields = [
-        "concepts", "colors", "clusters", "embeddings", "regions", "frames", "tracks",
-        "time_segments", "hits", "heatmaps", "parts"
-    ]
-    for field in repeated_fields:
-      if getattr(data_msg, field):  # checks if the list is not empty
-        return True
-
-    # Scalar fields (proto3 default: 0 for numbers, empty for strings/bytes, False for bool)
-    if (data_msg.int_value != 0 or data_msg.float_value != 0.0 or data_msg.bytes_value != b"" or
-        data_msg.bool_value is True or data_msg.string_value != ""):
-      return True
-
-    return False
+    """A class that can be used to convert data to and from a specific format."""
+
+    @classmethod
+    def convert_input_data_to_new_format(
+        cls, data: resources_pb2.Data, input_fields: List[resources_pb2.ModelTypeField]
+    ) -> resources_pb2.Data:
+        """Convert input data to new format."""
+        new_data = resources_pb2.Data()
+        for field in input_fields:
+            part_data = cls._convert_field(data, field)
+            if cls._is_data_set(part_data):
+                # if the field is set, add it to the new data part
+                part = new_data.parts.add()
+                part.id = field.name
+                part.data.CopyFrom(part_data)
+            elif field.required:
+                raise ValueError(f"Field {field.name} is required but not set")
+        return new_data
+
+    @classmethod
+    def _convert_field(
+        cls, old_data: resources_pb2.Data, field: resources_pb2.ModelTypeField
+    ) -> resources_pb2.Data:
+        data_type = field.type
+        new_data = resources_pb2.Data()
+        if data_type == resources_pb2.ModelTypeField.DataType.STR:
+            if old_data.HasField('text'):
+                new_data.string_value = old_data.text.raw
+                old_data.ClearField('text')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.IMAGE:
+            if old_data.HasField('image'):
+                new_data.image.CopyFrom(old_data.image)
+                # Clear the old field to avoid duplication
+                old_data.ClearField('image')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.VIDEO:
+            if old_data.HasField('video'):
+                new_data.video.CopyFrom(old_data.video)
+                old_data.ClearField('video')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.BOOL:
+            if old_data.bool_value is not False:
+                new_data.bool_value = old_data.bool_value
+                old_data.bool_value = False
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.INT:
+            if old_data.int_value != 0:
+                new_data.int_value = old_data.int_value
+                old_data.int_value = 0
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.FLOAT:
+            if old_data.float_value != 0.0:
+                new_data.float_value = old_data.float_value
+                old_data.float_value = 0.0
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.BYTES:
+            if old_data.bytes_value != b"":
+                new_data.bytes_value = old_data.bytes_value
+                old_data.bytes_value = b""
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.NDARRAY:
+            if old_data.HasField('ndarray'):
+                new_data.ndarray.CopyFrom(old_data.ndarray)
+                old_data.ClearField('ndarray')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.TEXT:
+            if old_data.HasField('text'):
+                new_data.text.CopyFrom(old_data.text)
+                old_data.ClearField('text')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.AUDIO:
+            if old_data.HasField('audio'):
+                new_data.audio.CopyFrom(old_data.audio)
+                old_data.ClearField('audio')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.CONCEPT:
+            if old_data.concepts:
+                new_data.concepts.extend(old_data.concepts)
+                old_data.ClearField('concepts')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.REGION:
+            if old_data.regions:
+                new_data.regions.extend(old_data.regions)
+                old_data.ClearField('regions')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.FRAME:
+            if old_data.frames:
+                new_data.frames.extend(old_data.frames)
+                old_data.ClearField('frames')
+            return new_data
+        elif data_type == resources_pb2.ModelTypeField.DataType.LIST:
+            if not field.type_args:
+                raise ValueError("LIST type requires type_args")
+            element_field = field.type_args[0]
+            if element_field in (
+                resources_pb2.ModelTypeField.DataType.CONCEPT,
+                resources_pb2.ModelTypeField.DataType.REGION,
+                resources_pb2.ModelTypeField.DataType.FRAME,
+            ):
+                # convert to new format
+                new_data = cls._convert_field(old_data, element_field)
+            return new_data
+        else:
+            return new_data
+            # raise ValueError(f"Unsupported data type: {data_type}")
+
+    @classmethod
+    def is_old_format(cls, data: resources_pb2.Data) -> bool:
+        """Check if the Data proto is in the old format (without parts)."""
+        if len(data.parts) > 0:
+            return False  # New format uses parts
+
+        # Check if any singular field is set
+        singular_fields = [
+            'image',
+            'video',
+            'metadata',
+            'geo',
+            'text',
+            'audio',
+            'ndarray',
+            'int_value',
+            'float_value',
+            'bytes_value',
+            'bool_value',
+            'string_value',
+        ]
+        for field in singular_fields:
+            if data.HasField(field):
+                return True
+
+        # Check if any repeated field has elements
+        repeated_fields = [
+            'concepts',
+            'colors',
+            'clusters',
+            'embeddings',
+            'regions',
+            'frames',
+            'tracks',
+            'time_segments',
+            'hits',
+            'heatmaps',
+        ]
+        for field in repeated_fields:
+            if getattr(data, field):
+                return True
+
+        return False
+
+    @classmethod
+    def convert_output_data_to_old_format(cls, data: resources_pb2.Data) -> resources_pb2.Data:
+        """Convert output data to old format."""
+        old_data = resources_pb2.Data()
+        part_data = data.parts[0].data
+        # Handle text.raw specially (common case for text outputs)
+        old_data = part_data
+        if old_data.string_value:
+            old_data.text.raw = old_data.string_value
+
+        return old_data
+
+    @classmethod
+    def _is_data_set(cls, data_msg):
+        # Singular message fields
+        singular_fields = ["image", "video", "metadata", "geo", "text", "audio", "ndarray"]
+        for field in singular_fields:
+            if data_msg.HasField(field):
+                return True
+
+        # Repeated fields
+        repeated_fields = [
+            "concepts",
+            "colors",
+            "clusters",
+            "embeddings",
+            "regions",
+            "frames",
+            "tracks",
+            "time_segments",
+            "hits",
+            "heatmaps",
+            "parts",
+        ]
+        for field in repeated_fields:
+            if getattr(data_msg, field):  # checks if the list is not empty
+                return True
+
+        # Scalar fields (proto3 default: 0 for numbers, empty for strings/bytes, False for bool)
+        if (
+            data_msg.int_value != 0
+            or data_msg.float_value != 0.0
+            or data_msg.bytes_value != b""
+            or data_msg.bool_value is True
+            or data_msg.string_value != ""
+        ):
+            return True
+
+        return False
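
To put the changed file in context, here is a minimal usage sketch of the 11.4.0 InputField and DataConverter shown in the diff above. It is not part of the package; it assumes clarifai 11.4.0 and clarifai-grpc are installed, and the field name "prompt" plus the example values are purely illustrative.

# Minimal sketch (not from the package) exercising the 11.4.0 APIs diffed above.
from clarifai_grpc.grpc.api import resources_pb2
from clarifai.runners.utils.data_utils import DataConverter, InputField

# 11.4.0 no longer accepts the is_param argument that the removed 11.3.0rc2 constructor had.
temperature = InputField(description="Sampling temperature", min_value=0.0, max_value=1.0)
proto = temperature.to_proto()  # ModelTypeField with required=True (no default) and a min/max range

# Convert an old-style flat Data proto into the new parts-based layout.
old_data = resources_pb2.Data()
old_data.text.raw = "hello"
prompt_field = resources_pb2.ModelTypeField(  # "prompt" is a made-up field name for this example
    name="prompt", type=resources_pb2.ModelTypeField.DataType.STR
)
if DataConverter.is_old_format(old_data):
    new_data = DataConverter.convert_input_data_to_new_format(old_data, [prompt_field])
    # new_data.parts[0].id == "prompt" and new_data.parts[0].data.string_value == "hello"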