clarifai-9.10.1-py3-none-any.whl → clarifai-9.10.3-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (323)
  1. clarifai/client/__init__.py +3 -2
  2. clarifai/client/app.py +39 -23
  3. clarifai/client/base.py +6 -6
  4. clarifai/client/dataset.py +113 -55
  5. clarifai/client/input.py +47 -55
  6. clarifai/client/model.py +27 -25
  7. clarifai/client/module.py +13 -11
  8. clarifai/client/runner.py +5 -3
  9. clarifai/client/search.py +29 -10
  10. clarifai/client/user.py +14 -8
  11. clarifai/client/workflow.py +22 -20
  12. clarifai/constants/dataset.py +22 -0
  13. clarifai/datasets/upload/base.py +9 -7
  14. clarifai/datasets/upload/features.py +3 -3
  15. clarifai/datasets/upload/image.py +49 -50
  16. clarifai/datasets/upload/loaders/coco_captions.py +26 -80
  17. clarifai/datasets/upload/loaders/coco_detection.py +56 -115
  18. clarifai/datasets/upload/loaders/coco_segmentation.py +69 -137
  19. clarifai/datasets/upload/loaders/imagenet_classification.py +2 -3
  20. clarifai/datasets/upload/loaders/xview_detection.py +3 -3
  21. clarifai/datasets/upload/text.py +16 -16
  22. clarifai/datasets/upload/utils.py +196 -21
  23. clarifai/utils/misc.py +21 -0
  24. clarifai/versions.py +1 -1
  25. {clarifai-9.10.1.dist-info → clarifai-9.10.3.dist-info}/METADATA +3 -3
  26. clarifai-9.10.3.dist-info/RECORD +96 -0
  27. clarifai-9.10.3.dist-info/top_level.txt +1 -0
  28. clarifai/auth/__init__.py +0 -6
  29. clarifai/auth/helper.py +0 -367
  30. clarifai/auth/register.py +0 -23
  31. clarifai/auth/stub.py +0 -127
  32. clarifai/datasets/upload/examples/README.md +0 -31
  33. clarifai/datasets/upload/examples/image_classification/__init__.py +0 -0
  34. clarifai/datasets/upload/examples/image_classification/cifar10/__init__.py +0 -0
  35. clarifai/datasets/upload/examples/image_classification/cifar10/cifar_small_test.csv +0 -10
  36. clarifai/datasets/upload/examples/image_classification/cifar10/cifar_small_train.csv +0 -10
  37. clarifai/datasets/upload/examples/image_classification/cifar10/dataset.py +0 -46
  38. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
  39. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
  40. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
  41. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
  42. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
  43. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
  44. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
  45. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
  46. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
  47. clarifai/datasets/upload/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
  48. clarifai/datasets/upload/examples/image_classification/food-101/__init__.py +0 -0
  49. clarifai/datasets/upload/examples/image_classification/food-101/dataset.py +0 -39
  50. clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
  51. clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
  52. clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
  53. clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
  54. clarifai/datasets/upload/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
  55. clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
  56. clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
  57. clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
  58. clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
  59. clarifai/datasets/upload/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
  60. clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
  61. clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
  62. clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
  63. clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
  64. clarifai/datasets/upload/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
  65. clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
  66. clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
  67. clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
  68. clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
  69. clarifai/datasets/upload/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
  70. clarifai/datasets/upload/examples/text_classification/__init__.py +0 -0
  71. clarifai/datasets/upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
  72. clarifai/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +0 -42
  73. clarifai/datasets/upload/examples/text_classification/imdb_dataset/test.csv +0 -201
  74. clarifai/datasets/upload/examples/text_classification/imdb_dataset/train.csv +0 -201
  75. clarifai/datasets/upload/loaders/README.md +0 -49
  76. clarifai/models/model_serving/README.md +0 -155
  77. clarifai/models/model_serving/docs/custom_config.md +0 -33
  78. clarifai/models/model_serving/docs/dependencies.md +0 -11
  79. clarifai/models/model_serving/docs/inference_parameters.md +0 -134
  80. clarifai/models/model_serving/docs/model_types.md +0 -20
  81. clarifai/models/model_serving/docs/output.md +0 -28
  82. clarifai/models/model_serving/examples/README.md +0 -7
  83. clarifai/models/model_serving/examples/image_classification/README.md +0 -9
  84. clarifai/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/README.md +0 -11
  85. clarifai/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/config.json +0 -42
  86. clarifai/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/preprocessor_config.json +0 -15
  87. clarifai/models/model_serving/examples/image_classification/age_vit/config.pbtxt +0 -23
  88. clarifai/models/model_serving/examples/image_classification/age_vit/labels.txt +0 -9
  89. clarifai/models/model_serving/examples/image_classification/age_vit/requirements.txt +0 -7
  90. clarifai/models/model_serving/examples/text_classification/README.md +0 -9
  91. clarifai/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/README.md +0 -12
  92. clarifai/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/config.json +0 -34
  93. clarifai/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/special_tokens_map.json +0 -1
  94. clarifai/models/model_serving/examples/text_classification/xlm-roberta/config.pbtxt +0 -21
  95. clarifai/models/model_serving/examples/text_classification/xlm-roberta/labels.txt +0 -3
  96. clarifai/models/model_serving/examples/text_classification/xlm-roberta/requirements.txt +0 -7
  97. clarifai/models/model_serving/examples/text_embedding/README.md +0 -9
  98. clarifai/models/model_serving/examples/text_to_image/README.md +0 -9
  99. clarifai/models/model_serving/examples/text_to_image/sd-v1.5/1/__init__.py +0 -0
  100. clarifai/models/model_serving/examples/text_to_image/sd-v1.5/1/inference.py +0 -52
  101. clarifai/models/model_serving/examples/text_to_image/sd-v1.5/1/model.py +0 -60
  102. clarifai/models/model_serving/examples/text_to_image/sd-v1.5/config.pbtxt +0 -22
  103. clarifai/models/model_serving/examples/text_to_image/sd-v1.5/requirements.txt +0 -6
  104. clarifai/models/model_serving/examples/text_to_text/README.md +0 -10
  105. clarifai/models/model_serving/examples/text_to_text/bart-summarize/config.pbtxt +0 -20
  106. clarifai/models/model_serving/examples/text_to_text/bart-summarize/requirements.txt +0 -4
  107. clarifai/models/model_serving/examples/visual_detection/README.md +0 -11
  108. clarifai/models/model_serving/examples/visual_detection/yolov5x/config.pbtxt +0 -36
  109. clarifai/models/model_serving/examples/visual_detection/yolov5x/labels.txt +0 -80
  110. clarifai/models/model_serving/examples/visual_detection/yolov5x/requirements.txt +0 -12
  111. clarifai/models/model_serving/examples/visual_embedding/README.md +0 -9
  112. clarifai/models/model_serving/examples/visual_embedding/vit-base/config.pbtxt +0 -22
  113. clarifai/models/model_serving/examples/visual_embedding/vit-base/requirements.txt +0 -5
  114. clarifai/models/model_serving/examples/visual_segmentation/README.md +0 -9
  115. clarifai/models/model_serving/examples/visual_segmentation/segformer-b2/config.pbtxt +0 -24
  116. clarifai/models/model_serving/examples/visual_segmentation/segformer-b2/labels.txt +0 -18
  117. clarifai/models/model_serving/examples/visual_segmentation/segformer-b2/requirements.txt +0 -5
  118. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -24
  119. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -18
  120. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -18
  121. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -18
  122. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -18
  123. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -18
  124. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -28
  125. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -18
  126. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -18
  127. clarifai/modules/README.md +0 -5
  128. clarifai/modules/style.css +0 -217
  129. clarifai-9.10.1.dist-info/RECORD +0 -386
  130. clarifai-9.10.1.dist-info/top_level.txt +0 -2
  131. clarifai_utils/__init__.py +0 -0
  132. clarifai_utils/auth/__init__.py +0 -6
  133. clarifai_utils/auth/helper.py +0 -367
  134. clarifai_utils/auth/register.py +0 -23
  135. clarifai_utils/auth/stub.py +0 -127
  136. clarifai_utils/cli.py +0 -0
  137. clarifai_utils/client/__init__.py +0 -16
  138. clarifai_utils/client/app.py +0 -684
  139. clarifai_utils/client/auth/__init__.py +0 -4
  140. clarifai_utils/client/auth/helper.py +0 -367
  141. clarifai_utils/client/auth/register.py +0 -23
  142. clarifai_utils/client/auth/stub.py +0 -127
  143. clarifai_utils/client/base.py +0 -131
  144. clarifai_utils/client/dataset.py +0 -442
  145. clarifai_utils/client/input.py +0 -892
  146. clarifai_utils/client/lister.py +0 -54
  147. clarifai_utils/client/model.py +0 -575
  148. clarifai_utils/client/module.py +0 -94
  149. clarifai_utils/client/runner.py +0 -161
  150. clarifai_utils/client/search.py +0 -239
  151. clarifai_utils/client/user.py +0 -253
  152. clarifai_utils/client/workflow.py +0 -223
  153. clarifai_utils/constants/model.py +0 -4
  154. clarifai_utils/constants/search.py +0 -2
  155. clarifai_utils/datasets/__init__.py +0 -0
  156. clarifai_utils/datasets/export/__init__.py +0 -0
  157. clarifai_utils/datasets/export/inputs_annotations.py +0 -222
  158. clarifai_utils/datasets/upload/__init__.py +0 -0
  159. clarifai_utils/datasets/upload/base.py +0 -66
  160. clarifai_utils/datasets/upload/examples/README.md +0 -31
  161. clarifai_utils/datasets/upload/examples/image_classification/__init__.py +0 -0
  162. clarifai_utils/datasets/upload/examples/image_classification/cifar10/__init__.py +0 -0
  163. clarifai_utils/datasets/upload/examples/image_classification/cifar10/cifar_small_test.csv +0 -10
  164. clarifai_utils/datasets/upload/examples/image_classification/cifar10/cifar_small_train.csv +0 -10
  165. clarifai_utils/datasets/upload/examples/image_classification/cifar10/dataset.py +0 -46
  166. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_700.jpg +0 -0
  167. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_701.jpg +0 -0
  168. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_702.jpg +0 -0
  169. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_703.jpg +0 -0
  170. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_704.jpg +0 -0
  171. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_705.jpg +0 -0
  172. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_706.jpg +0 -0
  173. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_707.jpg +0 -0
  174. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_708.jpg +0 -0
  175. clarifai_utils/datasets/upload/examples/image_classification/cifar10/images/test_batch_709.jpg +0 -0
  176. clarifai_utils/datasets/upload/examples/image_classification/food-101/__init__.py +0 -0
  177. clarifai_utils/datasets/upload/examples/image_classification/food-101/dataset.py +0 -39
  178. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/1420783.jpg +0 -0
  179. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/3287885.jpg +0 -0
  180. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/3617075.jpg +0 -0
  181. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/38052.jpg +0 -0
  182. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/beignets/39147.jpg +0 -0
  183. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/139558.jpg +0 -0
  184. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/1636096.jpg +0 -0
  185. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/2480925.jpg +0 -0
  186. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/3385808.jpg +0 -0
  187. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/hamburger/3647386.jpg +0 -0
  188. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/1826869.jpg +0 -0
  189. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/2243245.jpg +0 -0
  190. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/259212.jpg +0 -0
  191. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/2842688.jpg +0 -0
  192. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/prime_rib/3035414.jpg +0 -0
  193. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/1545393.jpg +0 -0
  194. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/2427642.jpg +0 -0
  195. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/3520891.jpg +0 -0
  196. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/377566.jpg +0 -0
  197. clarifai_utils/datasets/upload/examples/image_classification/food-101/images/ramen/503504.jpg +0 -0
  198. clarifai_utils/datasets/upload/examples/text_classification/__init__.py +0 -0
  199. clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/__init__.py +0 -0
  200. clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/dataset.py +0 -42
  201. clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/test.csv +0 -201
  202. clarifai_utils/datasets/upload/examples/text_classification/imdb_dataset/train.csv +0 -201
  203. clarifai_utils/datasets/upload/features.py +0 -44
  204. clarifai_utils/datasets/upload/image.py +0 -165
  205. clarifai_utils/datasets/upload/loaders/README.md +0 -49
  206. clarifai_utils/datasets/upload/loaders/__init__.py +0 -0
  207. clarifai_utils/datasets/upload/loaders/coco_captions.py +0 -103
  208. clarifai_utils/datasets/upload/loaders/coco_detection.py +0 -134
  209. clarifai_utils/datasets/upload/loaders/coco_segmentation.py +0 -166
  210. clarifai_utils/datasets/upload/loaders/imagenet_classification.py +0 -59
  211. clarifai_utils/datasets/upload/loaders/xview_detection.py +0 -148
  212. clarifai_utils/datasets/upload/text.py +0 -53
  213. clarifai_utils/datasets/upload/utils.py +0 -63
  214. clarifai_utils/errors.py +0 -89
  215. clarifai_utils/models/__init__.py +0 -0
  216. clarifai_utils/models/api.py +0 -283
  217. clarifai_utils/models/model_serving/README.md +0 -155
  218. clarifai_utils/models/model_serving/__init__.py +0 -12
  219. clarifai_utils/models/model_serving/cli/__init__.py +0 -12
  220. clarifai_utils/models/model_serving/cli/deploy_cli.py +0 -123
  221. clarifai_utils/models/model_serving/cli/model_zip.py +0 -61
  222. clarifai_utils/models/model_serving/cli/repository.py +0 -87
  223. clarifai_utils/models/model_serving/constants.py +0 -1
  224. clarifai_utils/models/model_serving/docs/custom_config.md +0 -33
  225. clarifai_utils/models/model_serving/docs/dependencies.md +0 -11
  226. clarifai_utils/models/model_serving/docs/inference_parameters.md +0 -134
  227. clarifai_utils/models/model_serving/docs/model_types.md +0 -20
  228. clarifai_utils/models/model_serving/docs/output.md +0 -28
  229. clarifai_utils/models/model_serving/examples/README.md +0 -7
  230. clarifai_utils/models/model_serving/examples/image_classification/README.md +0 -9
  231. clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/__init__.py +0 -0
  232. clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/inference.py +0 -56
  233. clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/model.py +0 -61
  234. clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/README.md +0 -11
  235. clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/config.json +0 -42
  236. clarifai_utils/models/model_serving/examples/image_classification/age_vit/1/vit-age-classifier/preprocessor_config.json +0 -15
  237. clarifai_utils/models/model_serving/examples/image_classification/age_vit/config.pbtxt +0 -23
  238. clarifai_utils/models/model_serving/examples/image_classification/age_vit/labels.txt +0 -9
  239. clarifai_utils/models/model_serving/examples/image_classification/age_vit/requirements.txt +0 -7
  240. clarifai_utils/models/model_serving/examples/text_classification/README.md +0 -9
  241. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/__init__.py +0 -0
  242. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/inference.py +0 -55
  243. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/model.py +0 -61
  244. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/README.md +0 -12
  245. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/config.json +0 -34
  246. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/1/twitter-xlm-roberta-base-sentiment/special_tokens_map.json +0 -1
  247. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/config.pbtxt +0 -21
  248. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/labels.txt +0 -3
  249. clarifai_utils/models/model_serving/examples/text_classification/xlm-roberta/requirements.txt +0 -7
  250. clarifai_utils/models/model_serving/examples/text_embedding/README.md +0 -9
  251. clarifai_utils/models/model_serving/examples/text_to_image/README.md +0 -9
  252. clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/1/__init__.py +0 -0
  253. clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/1/inference.py +0 -52
  254. clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/1/model.py +0 -60
  255. clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/config.pbtxt +0 -22
  256. clarifai_utils/models/model_serving/examples/text_to_image/sd-v1.5/requirements.txt +0 -6
  257. clarifai_utils/models/model_serving/examples/text_to_text/README.md +0 -10
  258. clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/1/__init__.py +0 -0
  259. clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/1/inference.py +0 -47
  260. clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/1/model.py +0 -60
  261. clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/config.pbtxt +0 -20
  262. clarifai_utils/models/model_serving/examples/text_to_text/bart-summarize/requirements.txt +0 -4
  263. clarifai_utils/models/model_serving/examples/visual_detection/README.md +0 -11
  264. clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/1/inference.py +0 -72
  265. clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/1/model.py +0 -61
  266. clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/config.pbtxt +0 -36
  267. clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/labels.txt +0 -80
  268. clarifai_utils/models/model_serving/examples/visual_detection/yolov5x/requirements.txt +0 -12
  269. clarifai_utils/models/model_serving/examples/visual_embedding/README.md +0 -9
  270. clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/1/__init__.py +0 -0
  271. clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/1/inference.py +0 -51
  272. clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/1/model.py +0 -60
  273. clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/config.pbtxt +0 -22
  274. clarifai_utils/models/model_serving/examples/visual_embedding/vit-base/requirements.txt +0 -5
  275. clarifai_utils/models/model_serving/examples/visual_segmentation/README.md +0 -9
  276. clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/1/__init__.py +0 -0
  277. clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/1/inference.py +0 -55
  278. clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/1/model.py +0 -60
  279. clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/config.pbtxt +0 -24
  280. clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/labels.txt +0 -18
  281. clarifai_utils/models/model_serving/examples/visual_segmentation/segformer-b2/requirements.txt +0 -5
  282. clarifai_utils/models/model_serving/model_config/__init__.py +0 -14
  283. clarifai_utils/models/model_serving/model_config/config.py +0 -302
  284. clarifai_utils/models/model_serving/model_config/inference_parameter.py +0 -124
  285. clarifai_utils/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -24
  286. clarifai_utils/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -18
  287. clarifai_utils/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -18
  288. clarifai_utils/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -18
  289. clarifai_utils/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -18
  290. clarifai_utils/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -18
  291. clarifai_utils/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -28
  292. clarifai_utils/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -18
  293. clarifai_utils/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -18
  294. clarifai_utils/models/model_serving/model_config/serializer.py +0 -134
  295. clarifai_utils/models/model_serving/models/__init__.py +0 -12
  296. clarifai_utils/models/model_serving/models/default_test.py +0 -275
  297. clarifai_utils/models/model_serving/models/inference.py +0 -42
  298. clarifai_utils/models/model_serving/models/model_types.py +0 -265
  299. clarifai_utils/models/model_serving/models/output.py +0 -124
  300. clarifai_utils/models/model_serving/models/pb_model.py +0 -74
  301. clarifai_utils/models/model_serving/models/test.py +0 -64
  302. clarifai_utils/models/model_serving/pb_model_repository.py +0 -101
  303. clarifai_utils/modules/README.md +0 -5
  304. clarifai_utils/modules/__init__.py +0 -0
  305. clarifai_utils/modules/css.py +0 -60
  306. clarifai_utils/modules/pages.py +0 -42
  307. clarifai_utils/modules/style.css +0 -217
  308. clarifai_utils/runners/__init__.py +0 -0
  309. clarifai_utils/runners/example.py +0 -33
  310. clarifai_utils/schema/search.py +0 -69
  311. clarifai_utils/urls/helper.py +0 -103
  312. clarifai_utils/utils/__init__.py +0 -0
  313. clarifai_utils/utils/logging.py +0 -90
  314. clarifai_utils/utils/misc.py +0 -33
  315. clarifai_utils/utils/model_train.py +0 -157
  316. clarifai_utils/versions.py +0 -6
  317. clarifai_utils/workflows/__init__.py +0 -0
  318. clarifai_utils/workflows/export.py +0 -68
  319. clarifai_utils/workflows/utils.py +0 -59
  320. clarifai_utils/workflows/validate.py +0 -67
  321. {clarifai-9.10.1.dist-info → clarifai-9.10.3.dist-info}/LICENSE +0 -0
  322. {clarifai-9.10.1.dist-info → clarifai-9.10.3.dist-info}/WHEEL +0 -0
  323. {clarifai-9.10.1.dist-info → clarifai-9.10.3.dist-info}/entry_points.txt +0 -0
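The bulk of this diff is the removal of the duplicated clarifai_utils top-level package (items 131-320), along with the bundled example datasets and model_serving example assets, leaving clarifai as the single top-level package (compare items 27 and 130 for top_level.txt). A minimal post-upgrade check, using only the Python standard library and no Clarifai-specific API:

from importlib import metadata, util

# Installed distribution version; expected to report 9.10.3 after upgrading.
print(metadata.version("clarifai"))

# The duplicated clarifai_utils top-level package is dropped in 9.10.3 per the
# file list above, so on a clean install of the new wheel this prints None.
print(util.find_spec("clarifai_utils"))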
clarifai_utils/client/lister.py
@@ -1,54 +0,0 @@
- from typing import Any, Callable, Dict, Generator
-
- from clarifai_grpc.grpc.api.status import status_code_pb2
- from google.protobuf.json_format import MessageToDict
-
- from clarifai.client.base import BaseClient
-
-
- class Lister(BaseClient):
-   """Lister class for obtaining paginated results from the Clarifai API."""
-
-   def __init__(self, page_size: int = 16):
-     self.default_page_size = page_size
-
-   def list_pages_generator(self,
-                            endpoint: Callable,
-                            proto_message: Any,
-                            request_data: Dict[str, Any],
-                            page_no: int = None,
-                            per_page: int = None) -> Generator[Dict[str, Any], None, None]:
-     """Lists pages of a resource.
-
-     Args:
-         endpoint (Callable): The endpoint to call.
-         proto_message (Any): The proto message to use.
-         request_data (dict): The request data to use.
-         page_no (int): The page number to list.
-         per_page (int): The number of items per page.
-
-     Yields:
-         response_dict: The next item in the listing.
-     """
-     page = 1 if not page_no else page_no
-     if page_no and not per_page:
-       per_page = self.default_page_size
-     while True:
-       request_data['page'] = page
-       request_data['per_page'] = per_page
-       response = self._grpc_request(endpoint, proto_message(**request_data))
-       dict_response = MessageToDict(response, preserving_proto_field_name=True)
-       if response.status.code != status_code_pb2.SUCCESS:
-         raise Exception(f"Listing failed with response {response!r}")
-       if len(list(dict_response.keys())) == 1:
-         break
-       else:
-         listing_resource = list(dict_response.keys())[1]
-         for item in dict_response[listing_resource]:
-           if listing_resource == "dataset_inputs":
-             yield self.process_response_keys(item["input"], listing_resource[:-1])
-           else:
-             yield self.process_response_keys(item, listing_resource[:-1])
-       if page_no is not None or per_page is not None:
-         break
-       page += 1
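The list_pages_generator removed above is the clarifai_utils copy of the pagination helper that clarifai/client/lister.py still provides; it drives the list_* methods of the retained client classes. A usage sketch based on the Model.list_versions docstring in the next hunk; the IDs are placeholders and authentication through the CLARIFAI_PAT environment variable is assumed:

from clarifai.client.model import Model

model = Model(model_id="model_id", user_id="user_id", app_id="app_id")

# With neither page_no nor per_page, the generator walks every page of results.
all_versions = list(model.list_versions())

# With page_no given and per_page omitted, per_page falls back to the default
# page size (16) and only a single page is returned, per the break condition
# at the end of the loop above.
first_page = list(model.list_versions(page_no=1))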
clarifai_utils/client/model.py
@@ -1,575 +0,0 @@
- import os
- import time
- from typing import Any, Dict, Generator, List
-
- import requests
- import yaml
- from clarifai_grpc.grpc.api import resources_pb2, service_pb2
- from clarifai_grpc.grpc.api.resources_pb2 import Input
- from clarifai_grpc.grpc.api.status import status_code_pb2
- from google.protobuf.json_format import MessageToDict
- from google.protobuf.struct_pb2 import Struct
-
- from clarifai.client.base import BaseClient
- from clarifai.client.input import Inputs
- from clarifai.client.lister import Lister
- from clarifai.constants.model import TRAINABLE_MODEL_TYPES
- from clarifai.errors import UserError
- from clarifai.urls.helper import ClarifaiUrlHelper
- from clarifai.utils.logging import get_logger
- from clarifai.utils.misc import BackoffIterator
- from clarifai.utils.model_train import (find_and_replace_key, params_parser,
-                                         response_to_model_params, response_to_param_info,
-                                         response_to_templates)
-
-
- class Model(Lister, BaseClient):
-   """Model is a class that provides access to Clarifai API endpoints related to Model information."""
-
-   def __init__(self,
-                url_init: str = "",
-                model_id: str = "",
-                model_version: Dict = {'id': ""},
-                base_url: str = "https://api.clarifai.com",
-                **kwargs):
-     """Initializes a Model object.
-
-     Args:
-         url_init (str): The URL to initialize the model object.
-         model_id (str): The Model ID to interact with.
-         model_version (dict): The Model Version to interact with.
-         base_url (str): Base API url. Default "https://api.clarifai.com"
-         **kwargs: Additional keyword arguments to be passed to the Model.
-     """
-     if url_init != "" and model_id != "":
-       raise UserError("You can only specify one of url_init or model_id.")
-     if url_init == "" and model_id == "":
-       raise UserError("You must specify one of url_init or model_id.")
-     if url_init != "":
-       user_id, app_id, _, model_id, model_version_id = ClarifaiUrlHelper.split_clarifai_url(
-           url_init)
-       model_version = {'id': model_version_id}
-       kwargs = {'user_id': user_id, 'app_id': app_id}
-     self.kwargs = {**kwargs, 'id': model_id, 'model_version': model_version,}
-     self.model_info = resources_pb2.Model(**self.kwargs)
-     self.logger = get_logger(logger_level="INFO")
-     self.training_params = {}
-     BaseClient.__init__(self, user_id=self.user_id, app_id=self.app_id, base=base_url)
-     Lister.__init__(self)
-
-   def list_training_templates(self) -> List[str]:
-     """Lists all the training templates for the model type.
-
-     Returns:
-         templates (List): List of training templates for the model type.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> print(model.list_training_templates())
-     """
-     if not self.model_info.model_type_id:
-       self.load_info()
-     if self.model_info.model_type_id not in TRAINABLE_MODEL_TYPES:
-       raise UserError(f"Model type {self.model_info.model_type_id} is not trainable")
-     request = service_pb2.ListModelTypesRequest(user_app_id=self.user_app_id,)
-     response = self._grpc_request(self.STUB.ListModelTypes, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-     templates = response_to_templates(
-         response=response, model_type_id=self.model_info.model_type_id)
-
-     return templates
-
-   def get_params(self, template: str = None, save_to: str = 'params.yaml') -> Dict[str, Any]:
-     """Returns the model params for the model type and yaml file.
-
-     Args:
-         template (str): The template to use for the model type.
-         yaml_file (str): The yaml file to save the model params.
-
-     Returns:
-         params (Dict): Dictionary of model params for the model type.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_params = model.get_params(template='template', yaml_file='model_params.yaml')
-     """
-     if not self.model_info.model_type_id:
-       self.load_info()
-     if self.model_info.model_type_id not in TRAINABLE_MODEL_TYPES:
-       raise UserError(f"Model type {self.model_info.model_type_id} is not trainable")
-     if template is None and self.model_info.model_type_id not in [
-         "clusterer", "embedding-classifier"
-     ]:
-       raise UserError(
-           f"Template should be provided for {self.model_info.model_type_id} model type")
-     if template is not None and self.model_info.model_type_id in [
-         "clusterer", "embedding-classifier"
-     ]:
-       raise UserError(
-           f"Template should not be provided for {self.model_info.model_type_id} model type")
-
-     request = service_pb2.ListModelTypesRequest(user_app_id=self.user_app_id,)
-     response = self._grpc_request(self.STUB.ListModelTypes, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-     params = response_to_model_params(
-         response=response, model_type_id=self.model_info.model_type_id, template=template)
-     #yaml file
-     assert save_to.endswith('.yaml'), "File extension should be .yaml"
-     with open(save_to, 'w') as f:
-       yaml.dump(params, f, default_flow_style=False, sort_keys=False)
-     #updating the global model params
-     self.training_params.update(params)
-
-     return params
-
-   def update_params(self, **kwargs) -> None:
-     """Updates the model params for the model.
-
-     Args:
-         **kwargs: model params to update.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_params = model.get_params(template='template', yaml_file='model_params.yaml')
-         >>> model.update_params(batch_size = 8, dataset_version = 'dataset_version_id')
-     """
-     if self.model_info.model_type_id not in TRAINABLE_MODEL_TYPES:
-       raise UserError(f"Model type {self.model_info.model_type_id} is not trainable")
-     if len(self.training_params) == 0:
-       raise UserError(
-           f"Run 'model.get_params' to get the params for the {self.model_info.model_type_id} model type"
-       )
-     #getting all the keys in nested dictionary
-     all_keys = [key for key in self.training_params.keys()] + [
-         key for key in self.training_params.values() if isinstance(key, dict) for key in key
-     ]
-     #checking if the given params are valid
-     if not set(kwargs.keys()).issubset(all_keys):
-       raise UserError("Invalid params")
-     #updating the global model params
-     for key, value in kwargs.items():
-       find_and_replace_key(self.training_params, key, value)
-
-   def get_param_info(self, param: str) -> Dict[str, Any]:
-     """Returns the param info for the param.
-
-     Args:
-         param (str): The param to get the info for.
-
-     Returns:
-         param_info (Dict): Dictionary of model param info for the param.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_params = model.get_params(template='template', yaml_file='model_params.yaml')
-         >>> model.get_param_info('param')
-     """
-     if self.model_info.model_type_id not in TRAINABLE_MODEL_TYPES:
-       raise UserError(f"Model type {self.model_info.model_type_id} is not trainable")
-     if len(self.training_params) == 0:
-       raise UserError(
-           f"Run 'model.get_params' to get the params for the {self.model_info.model_type_id} model type"
-       )
-
-     all_keys = [key for key in self.training_params.keys()] + [
-         key for key in self.training_params.values() if isinstance(key, dict) for key in key
-     ]
-     if param not in all_keys:
-       raise UserError(f"Invalid param: '{param}' for model type '{self.model_info.model_type_id}'")
-     template = self.training_params['train_params']['template'] if 'template' in all_keys else None
-
-     request = service_pb2.ListModelTypesRequest(user_app_id=self.user_app_id,)
-     response = self._grpc_request(self.STUB.ListModelTypes, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-     param_info = response_to_param_info(
-         response=response,
-         model_type_id=self.model_info.model_type_id,
-         param=param,
-         template=template)
-
-     return param_info
-
-   def train(self, yaml_file: str = None) -> str:
-     """Trains the model based on the given yaml file or model params.
-
-     Args:
-         yaml_file (str): The yaml file for the model params.
-
-     Returns:
-         model_version_id (str): The model version ID for the model.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_params = model.get_params(template='template', yaml_file='model_params.yaml')
-         >>> model.train('model_params.yaml')
-     """
-     if self.model_info.model_type_id not in TRAINABLE_MODEL_TYPES:
-       raise UserError(f"Model type {self.model_info.model_type_id} is not trainable")
-     if not yaml_file and len(self.training_params) == 0:
-       raise UserError("Provide yaml file or run 'model.get_params()'")
-
-     if yaml_file:
-       with open(yaml_file, 'r') as file:
-         params_dict = yaml.safe_load(file)
-     else:
-       params_dict = self.training_params
-
-     train_dict = params_parser(params_dict)
-     request = service_pb2.PostModelVersionsRequest(
-         user_app_id=self.user_app_id,
-         model_id=self.id,
-         model_versions=[resources_pb2.ModelVersion(**train_dict)])
-     response = self._grpc_request(self.STUB.PostModelVersions, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-     self.logger.info("\nModel Training Started\n%s", response.status)
-
-     return response.model.model_version.id
-
-   def training_status(self, version_id: str, training_logs: bool = False) -> Dict[str, str]:
-     """Get the training status for the model version. Also stores training logs
-
-     Args:
-         version_id (str): The version ID to get the training status for.
-         training_logs (bool): Whether to save the training logs in a file.
-
-     Returns:
-         training_status (Dict): Dictionary of training status for the model version.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model.training_status(version_id='version_id',training_logs=True)
-     """
-     if self.model_info.model_type_id not in TRAINABLE_MODEL_TYPES:
-       raise UserError(f"Model type {self.model_info.model_type_id} is not trainable")
-
-     request = service_pb2.GetModelVersionRequest(
-         user_app_id=self.user_app_id, model_id=self.id, version_id=version_id)
-     response = self._grpc_request(self.STUB.GetModelVersion, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-
-     if training_logs:
-       try:
-         if response.model_version.train_log:
-           log_response = requests.get(response.model_version.train_log)
-           log_response.raise_for_status()  # Check for any HTTP errors
-           with open(version_id + '.log', 'wb') as file:
-             for chunk in log_response.iter_content(chunk_size=4096):  # 4KB
-               file.write(chunk)
-           self.logger.info(f"\nTraining logs are saving in '{version_id+'.log'}' file")
-
-       except requests.exceptions.RequestException as e:
-         raise Exception(f"An error occurred while getting training logs: {e}")
-
-     return response.model_version.status
-
-   def delete_version(self, version_id: str) -> None:
-     """Deletes a model version for the Model.
-
-     Args:
-         version_id (str): The version ID to delete.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model.delete_version(version_id='version_id')
-     """
-     request = service_pb2.DeleteModelVersionRequest(
-         user_app_id=self.user_app_id, model_id=self.id, version_id=version_id)
-
-     response = self._grpc_request(self.STUB.DeleteModelVersion, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-     self.logger.info("\nModel Version Deleted\n%s", response.status)
-
-   def create_version(self, **kwargs) -> 'Model':
-     """Creates a model version for the Model.
-
-     Args:
-         **kwargs: Additional keyword arguments to be passed to Model Version.
-           - description (str): The description of the model version.
-           - concepts (list[Concept]): The concepts to associate with the model version.
-           - output_info (resources_pb2.OutputInfo(): The output info to associate with the model version.
-
-     Returns:
-         Model: A Model object for the specified model ID.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model("model_url")
-                     or
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_version = model.create_version(description='model_version_description')
-     """
-     if self.model_info.model_type_id in TRAINABLE_MODEL_TYPES:
-       raise UserError(
-           f"{self.model_info.model_type_id} is a trainable model type. Use 'model.train()' to train the model"
-       )
-
-     request = service_pb2.PostModelVersionsRequest(
-         user_app_id=self.user_app_id,
-         model_id=self.id,
-         model_versions=[resources_pb2.ModelVersion(**kwargs)])
-
-     response = self._grpc_request(self.STUB.PostModelVersions, request)
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-     self.logger.info("\nModel Version created\n%s", response.status)
-
-     kwargs.update({'app_id': self.app_id, 'user_id': self.user_id})
-     dict_response = MessageToDict(response, preserving_proto_field_name=True)
-     kwargs = self.process_response_keys(dict_response['model'], 'model')
-
-     return Model(base_url=self.base, **kwargs)
-
-   def list_versions(self, page_no: int = None,
-                     per_page: int = None) -> Generator['Model', None, None]:
-     """Lists all the versions for the model.
-
-     Args:
-         page_no (int): The page number to list.
-         per_page (int): The number of items per page.
-
-     Yields:
-         Model: Model objects for the versions of the model.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model("model_url") # Example URL: https://clarifai.com/clarifai/main/models/general-image-recognition
-                     or
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> all_model_versions = list(model.list_versions())
-
-     Note:
-         Defaults to 16 per page if page_no is specified and per_page is not specified.
-         If both page_no and per_page are None, then lists all the resources.
-     """
-     request_data = dict(
-         user_app_id=self.user_app_id,
-         model_id=self.id,
-     )
-     all_model_versions_info = self.list_pages_generator(
-         self.STUB.ListModelVersions,
-         service_pb2.ListModelVersionsRequest,
-         request_data,
-         per_page=per_page,
-         page_no=page_no)
-
-     for model_version_info in all_model_versions_info:
-       model_version_info['id'] = model_version_info['model_version_id']
-       del model_version_info['model_version_id']
-       try:
-         del model_version_info['train_info']['dataset']['version']['metrics']
-       except KeyError:
-         pass
-       yield Model(
-           model_id=self.id,
-           base_url=self.base,
-           **dict(self.kwargs, model_version=model_version_info))
-
-   def predict(self, inputs: List[Input], inference_params: Dict = {}, output_config: Dict = {}):
-     """Predicts the model based on the given inputs.
-
-     Args:
-         inputs (list[Input]): The inputs to predict, must be less than 128.
-     """
-     if not isinstance(inputs, list):
-       raise UserError('Invalid inputs, inputs must be a list of Input objects.')
-     if len(inputs) > 128:
-       raise UserError("Too many inputs. Max is 128.")  # TODO Use Chunker for inputs len > 128
-
-     self._override_model_version(inference_params, output_config)
-     request = service_pb2.PostModelOutputsRequest(
-         user_app_id=self.user_app_id,
-         model_id=self.id,
-         version_id=self.model_version.id,
-         inputs=inputs,
-         model=self.model_info)
-
-     start_time = time.time()
-     backoff_iterator = BackoffIterator()
-     while True:
-       response = self._grpc_request(self.STUB.PostModelOutputs, request)
-
-       if response.status.code == status_code_pb2.MODEL_DEPLOYING and \
-           time.time() - start_time < 60 * 10:  # 10 minutes
-         self.logger.info(f"{self.id} model is still deploying, please wait...")
-         time.sleep(next(backoff_iterator))
-         continue
-
-       if response.status.code != status_code_pb2.SUCCESS:
-         raise Exception(f"Model Predict failed with response {response.status!r}")
-       else:
-         break
-
-     return response
-
-   def predict_by_filepath(self,
-                           filepath: str,
-                           input_type: str,
-                           inference_params: Dict = {},
-                           output_config: Dict = {}):
-     """Predicts the model based on the given filepath.
-
-     Args:
-         filepath (str): The filepath to predict.
-         input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio.
-         inference_params (dict): The inference params to override.
-         output_config (dict): The output config to override.
-           min_value (float): The minimum value of the prediction confidence to filter.
-           max_concepts (int): The maximum number of concepts to return.
-           select_concepts (list[Concept]): The concepts to select.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model("model_url") # Example URL: https://clarifai.com/clarifai/main/models/general-image-recognition
-                     or
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_prediction = model.predict_by_filepath('/path/to/image.jpg', 'image')
-         >>> model_prediction = model.predict_by_filepath('/path/to/text.txt', 'text')
-     """
-     if not os.path.isfile(filepath):
-       raise UserError('Invalid filepath.')
-
-     with open(filepath, "rb") as f:
-       file_bytes = f.read()
-
-     return self.predict_by_bytes(file_bytes, input_type, inference_params, output_config)
-
-   def predict_by_bytes(self,
-                        input_bytes: bytes,
-                        input_type: str,
-                        inference_params: Dict = {},
-                        output_config: Dict = {}):
-     """Predicts the model based on the given bytes.
-
-     Args:
-         input_bytes (bytes): File Bytes to predict on.
-         input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio.
-         inference_params (dict): The inference params to override.
-         output_config (dict): The output config to override.
-           min_value (float): The minimum value of the prediction confidence to filter.
-           max_concepts (int): The maximum number of concepts to return.
-           select_concepts (list[Concept]): The concepts to select.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model("https://clarifai.com/openai/chat-completion/models/GPT-4")
-         >>> model_prediction = model.predict_by_bytes(b'Write a tweet on future of AI',
-                                                       input_type='text',
-                                                       inference_params=dict(temperature=str(0.7), max_tokens=30)))
-     """
-     if input_type not in {'image', 'text', 'video', 'audio'}:
-       raise UserError(
-           f"Got input type {input_type} but expected one of image, text, video, audio.")
-     if not isinstance(input_bytes, bytes):
-       raise UserError('Invalid bytes.')
-
-     if input_type == "image":
-       input_proto = Inputs().get_input_from_bytes("", image_bytes=input_bytes)
-     elif input_type == "text":
-       input_proto = Inputs().get_input_from_bytes("", text_bytes=input_bytes)
-     elif input_type == "video":
-       input_proto = Inputs().get_input_from_bytes("", video_bytes=input_bytes)
-     elif input_type == "audio":
-       input_proto = Inputs().get_input_from_bytes("", audio_bytes=input_bytes)
-
-     return self.predict(
-         inputs=[input_proto], inference_params=inference_params, output_config=output_config)
-
-   def predict_by_url(self,
-                      url: str,
-                      input_type: str,
-                      inference_params: Dict = {},
-                      output_config: Dict = {}):
-     """Predicts the model based on the given URL.
-
-     Args:
-         url (str): The URL to predict.
-         input_type (str): The type of input. Can be 'image', 'text', 'video' or 'audio.
-         inference_params (dict): The inference params to override.
-         output_config (dict): The output config to override.
-           min_value (float): The minimum value of the prediction confidence to filter.
-           max_concepts (int): The maximum number of concepts to return.
-           select_concepts (list[Concept]): The concepts to select.
-
-     Example:
-         >>> from clarifai.client.model import Model
-         >>> model = Model("model_url") # Example URL: https://clarifai.com/clarifai/main/models/general-image-recognition
-                     or
-         >>> model = Model(model_id='model_id', user_id='user_id', app_id='app_id')
-         >>> model_prediction = model.predict_by_url('url', 'image')
-     """
-     if input_type not in {'image', 'text', 'video', 'audio'}:
-       raise UserError(
-           f"Got input type {input_type} but expected one of image, text, video, audio.")
-
-     if input_type == "image":
-       input_proto = Inputs().get_input_from_url("", image_url=url)
-     elif input_type == "text":
-       input_proto = Inputs().get_input_from_url("", text_url=url)
-     elif input_type == "video":
-       input_proto = Inputs().get_input_from_url("", video_url=url)
-     elif input_type == "audio":
-       input_proto = Inputs().get_input_from_url("", audio_url=url)
-
-     return self.predict(
-         inputs=[input_proto], inference_params=inference_params, output_config=output_config)
-
-   def _override_model_version(self, inference_params: Dict = {}, output_config: Dict = {}) -> None:
-     """Overrides the model version.
-
-     Args:
-         inference_params (dict): The inference params to override.
-         output_config (dict): The output config to override.
-           min_value (float): The minimum value of the prediction confidence to filter.
-           max_concepts (int): The maximum number of concepts to return.
-           select_concepts (list[Concept]): The concepts to select.
-           sample_ms (int): The number of milliseconds to sample.
-     """
-     if inference_params is not None:
-       params = Struct()
-       params.update(inference_params)
-
-     self.model_info.model_version.output_info.CopyFrom(
-         resources_pb2.OutputInfo(
-             output_config=resources_pb2.OutputConfig(**output_config), params=params))
-
-   def load_info(self) -> None:
-     """Loads the model info."""
-     request = service_pb2.GetModelRequest(
-         user_app_id=self.user_app_id,
-         model_id=self.id,
-         version_id=self.model_info.model_version.id)
-     response = self._grpc_request(self.STUB.GetModel, request)
-
-     if response.status.code != status_code_pb2.SUCCESS:
-       raise Exception(response.status)
-
-     dict_response = MessageToDict(response, preserving_proto_field_name=True)
-     self.kwargs = self.process_response_keys(dict_response['model'])
-     self.model_info = resources_pb2.Model(**self.kwargs)
-
-   def __getattr__(self, name):
-     return getattr(self.model_info, name)
-
-   def __str__(self):
-     if len(self.kwargs) < 10:
-       self.load_info()
-
-     init_params = [param for param in self.kwargs.keys()]
-     attribute_strings = [
-         f"{param}={getattr(self.model_info, param)}" for param in init_params
-         if hasattr(self.model_info, param)
-     ]
-     return f"Model Details: \n{', '.join(attribute_strings)}\n"