clarifai 11.3.0rc2__py3-none-any.whl → 11.4.0__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in the registry.
Files changed (300)
  1. clarifai/__init__.py +1 -1
  2. clarifai/cli/__main__.py +1 -1
  3. clarifai/cli/base.py +144 -136
  4. clarifai/cli/compute_cluster.py +45 -31
  5. clarifai/cli/deployment.py +93 -76
  6. clarifai/cli/model.py +578 -180
  7. clarifai/cli/nodepool.py +100 -82
  8. clarifai/client/__init__.py +12 -2
  9. clarifai/client/app.py +973 -911
  10. clarifai/client/auth/helper.py +345 -342
  11. clarifai/client/auth/register.py +7 -7
  12. clarifai/client/auth/stub.py +107 -106
  13. clarifai/client/base.py +185 -178
  14. clarifai/client/compute_cluster.py +214 -180
  15. clarifai/client/dataset.py +793 -698
  16. clarifai/client/deployment.py +55 -50
  17. clarifai/client/input.py +1223 -1088
  18. clarifai/client/lister.py +47 -45
  19. clarifai/client/model.py +1939 -1717
  20. clarifai/client/model_client.py +525 -502
  21. clarifai/client/module.py +82 -73
  22. clarifai/client/nodepool.py +358 -213
  23. clarifai/client/runner.py +58 -0
  24. clarifai/client/search.py +342 -309
  25. clarifai/client/user.py +419 -414
  26. clarifai/client/workflow.py +294 -274
  27. clarifai/constants/dataset.py +11 -17
  28. clarifai/constants/model.py +8 -2
  29. clarifai/datasets/export/inputs_annotations.py +233 -217
  30. clarifai/datasets/upload/base.py +63 -51
  31. clarifai/datasets/upload/features.py +43 -38
  32. clarifai/datasets/upload/image.py +237 -207
  33. clarifai/datasets/upload/loaders/coco_captions.py +34 -32
  34. clarifai/datasets/upload/loaders/coco_detection.py +72 -65
  35. clarifai/datasets/upload/loaders/imagenet_classification.py +57 -53
  36. clarifai/datasets/upload/loaders/xview_detection.py +274 -132
  37. clarifai/datasets/upload/multimodal.py +55 -46
  38. clarifai/datasets/upload/text.py +55 -47
  39. clarifai/datasets/upload/utils.py +250 -234
  40. clarifai/errors.py +51 -50
  41. clarifai/models/api.py +260 -238
  42. clarifai/modules/css.py +50 -50
  43. clarifai/modules/pages.py +33 -33
  44. clarifai/rag/rag.py +312 -288
  45. clarifai/rag/utils.py +91 -84
  46. clarifai/runners/models/model_builder.py +906 -802
  47. clarifai/runners/models/model_class.py +370 -331
  48. clarifai/runners/models/model_run_locally.py +459 -419
  49. clarifai/runners/models/model_runner.py +170 -162
  50. clarifai/runners/models/model_servicer.py +78 -70
  51. clarifai/runners/server.py +111 -101
  52. clarifai/runners/utils/code_script.py +225 -187
  53. clarifai/runners/utils/const.py +4 -1
  54. clarifai/runners/utils/data_types/__init__.py +12 -0
  55. clarifai/runners/utils/data_types/data_types.py +598 -0
  56. clarifai/runners/utils/data_utils.py +387 -440
  57. clarifai/runners/utils/loader.py +247 -227
  58. clarifai/runners/utils/method_signatures.py +411 -386
  59. clarifai/runners/utils/openai_convertor.py +108 -109
  60. clarifai/runners/utils/serializers.py +175 -179
  61. clarifai/runners/utils/url_fetcher.py +35 -35
  62. clarifai/schema/search.py +56 -63
  63. clarifai/urls/helper.py +125 -102
  64. clarifai/utils/cli.py +129 -123
  65. clarifai/utils/config.py +127 -87
  66. clarifai/utils/constants.py +49 -0
  67. clarifai/utils/evaluation/helpers.py +503 -466
  68. clarifai/utils/evaluation/main.py +431 -393
  69. clarifai/utils/evaluation/testset_annotation_parser.py +154 -144
  70. clarifai/utils/logging.py +324 -306
  71. clarifai/utils/misc.py +60 -56
  72. clarifai/utils/model_train.py +165 -146
  73. clarifai/utils/protobuf.py +126 -103
  74. clarifai/versions.py +3 -1
  75. clarifai/workflows/export.py +48 -50
  76. clarifai/workflows/utils.py +39 -36
  77. clarifai/workflows/validate.py +55 -43
  78. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/METADATA +16 -6
  79. clarifai-11.4.0.dist-info/RECORD +109 -0
  80. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/WHEEL +1 -1
  81. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  82. clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
  83. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  84. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  85. clarifai/__pycache__/errors.cpython-311.pyc +0 -0
  86. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  87. clarifai/__pycache__/versions.cpython-311.pyc +0 -0
  88. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  89. clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  90. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  91. clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
  92. clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  93. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  94. clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  95. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  96. clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
  97. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  98. clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
  99. clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  100. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  101. clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
  102. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  103. clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
  104. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  105. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  106. clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
  107. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  108. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  109. clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
  110. clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  111. clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  112. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  113. clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
  114. clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
  115. clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
  116. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  117. clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
  118. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  119. clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
  120. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  121. clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
  122. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  123. clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
  124. clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
  125. clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
  126. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  127. clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
  128. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  129. clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
  130. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  131. clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
  132. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  133. clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
  134. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  135. clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
  136. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  137. clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
  138. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  139. clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
  140. clarifai/client/cli/__init__.py +0 -0
  141. clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  142. clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  143. clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  144. clarifai/client/cli/base_cli.py +0 -88
  145. clarifai/client/cli/model_cli.py +0 -29
  146. clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
  147. clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
  148. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  149. clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
  150. clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
  151. clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
  152. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  153. clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
  154. clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
  155. clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
  156. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  157. clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
  158. clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
  159. clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
  160. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  161. clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
  162. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  163. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  164. clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
  165. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  166. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  167. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
  168. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  169. clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
  170. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  171. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  172. clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
  173. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  174. clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
  175. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  176. clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
  177. clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
  178. clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
  179. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  180. clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
  181. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  182. clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
  183. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
  184. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
  185. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
  186. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
  187. clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
  188. clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
  189. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  190. clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
  191. clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
  192. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  193. clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
  194. clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
  195. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  196. clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
  197. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  198. clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
  199. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  200. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
  201. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
  202. clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
  203. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  204. clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
  205. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  206. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  207. clarifai/runners/models/__pycache__/base_typed_model.cpython-311.pyc +0 -0
  208. clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
  209. clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
  210. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  211. clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
  212. clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
  213. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  214. clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
  215. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  216. clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
  217. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  218. clarifai/runners/models/base_typed_model.py +0 -238
  219. clarifai/runners/models/model_class_refract.py +0 -80
  220. clarifai/runners/models/model_upload.py +0 -607
  221. clarifai/runners/models/temp.py +0 -25
  222. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  223. clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  224. clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
  225. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  226. clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
  227. clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
  228. clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
  229. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  230. clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
  231. clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
  232. clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
  233. clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
  234. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  235. clarifai/runners/utils/__pycache__/data_handler.cpython-311.pyc +0 -0
  236. clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
  237. clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
  238. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  239. clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
  240. clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
  241. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  242. clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
  243. clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
  244. clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
  245. clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
  246. clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
  247. clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
  248. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  249. clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
  250. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  251. clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
  252. clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
  253. clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
  254. clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
  255. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  256. clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
  257. clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
  258. clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
  259. clarifai/runners/utils/data_handler.py +0 -231
  260. clarifai/runners/utils/data_handler_refract.py +0 -213
  261. clarifai/runners/utils/data_types.py +0 -469
  262. clarifai/runners/utils/logger.py +0 -0
  263. clarifai/runners/utils/openai_format.py +0 -87
  264. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  265. clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
  266. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  267. clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
  268. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  269. clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  270. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  271. clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
  272. clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
  273. clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
  274. clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
  275. clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
  276. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  277. clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
  278. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  279. clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
  280. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  281. clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
  282. clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
  283. clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
  284. clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
  285. clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
  286. clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
  287. clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
  288. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  289. clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
  290. clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
  291. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  292. clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
  293. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  294. clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
  295. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  296. clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
  297. clarifai-11.3.0rc2.dist-info/RECORD +0 -322
  298. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/entry_points.txt +0 -0
  299. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info/licenses}/LICENSE +0 -0
  300. {clarifai-11.3.0rc2.dist-info → clarifai-11.4.0.dist-info}/top_level.txt +0 -0
@@ -12,243 +12,259 @@ from clarifai.errors import UserError
 
 
  def load_module_dataloader(module_dir: Union[str, os.PathLike], **kwargs) -> ClarifaiDataLoader:
- """Validate and import dataset module data generator.
- Args:
- `module_dir`: relative path to the module directory
- The directory must contain a `dataset.py` script and the data itself.
- kwargs: keyword arguments to be passed to the dataloader class
- Module Directory Structure:
- ---------------------------
- <folder_name>/
- ├──__init__.py
- ├──<Your local dir dataset>/
- └──dataset.py
- dataset.py must implement a class named following the convention,
- <dataset_name>DataLoader and this class must inherit from base ClarifaiDataLoader()
- """
- module_path = os.path.join(module_dir, "dataset.py")
- spec = importlib.util.spec_from_file_location("dataset", module_path)
-
- if not spec:
- raise ImportError(f"Module not found at {module_path}")
-
- # Load the module using the spec
- dataset = importlib.util.module_from_spec(spec)
- # Execute the module to make its contents available
- spec.loader.exec_module(dataset)
-
- # get main module class
- main_module_cls = None
- for name, obj in dataset.__dict__.items():
- if inspect.isclass(obj) and "DataLoader" in name:
- main_module_cls = obj
- else:
- continue
-
- return main_module_cls(**kwargs)
-
-
- class DisplayUploadStatus:
- """Class to display dataset upload status."""
-
- def __init__(self, dataloader: ClarifaiDataLoader,
- dataset_metrics_response: Type[MultiDatasetVersionMetricsGroupResponse],
- dataset_info_dict: Dict[str, str],
- pre_upload_stats: Tuple[Dict[str, int], Dict[str, int]]) -> None:
- """Initialize the class.
+ """Validate and import dataset module data generator.
  Args:
- dataloader: ClarifaiDataLoader object
- dataset_metrics_response: The dataset version metrics response from the server.
- dataset_info_dict: The dataset info dictionary.
- pre_upload_stats: The pre upload stats for the dataset.
- """
- self.dataloader = dataloader
- self.dataset_metrics_response = dataset_metrics_response
- self.dataset_info_dict = dataset_info_dict
- self.pre_upload_stats = pre_upload_stats
-
- self.display()
-
- def display(self) -> None:
- """Display the upload status."""
- from rich.console import Console
-
- local_inputs_count, local_annotations_dict = self.get_dataloader_stats()
- uploaded_inputs_dict, uploaded_annotations_dict = self.get_dataset_version_stats(
- self.dataset_metrics_response)
-
- # Subtract the pre upload stats from the uploaded stats
- uploaded_inputs_dict = {
- key: int(uploaded_inputs_dict[key]) - int(self.pre_upload_stats[0].get(key, 0))
- for key in uploaded_inputs_dict
- }
- uploaded_annotations_dict = {
- key: uploaded_annotations_dict[key] - self.pre_upload_stats[1].get(key, 0)
- for key in uploaded_annotations_dict
- }
-
- self.local_annotations_count = sum(local_annotations_dict.values())
- self.uploaded_annotations_count = sum(uploaded_annotations_dict.values())
-
- local_dataset_dict = {
- "Inputs Count": str(local_inputs_count),
- "Annotations Count": str(local_annotations_dict)
- }
- uploaded_dataset_dict = {
- "Inputs Count": str(uploaded_inputs_dict["total"]),
- "Annotations Count": str(uploaded_annotations_dict)
- }
-
- panel_layout = self.get_display_layout(local_dataset_dict, uploaded_dataset_dict)
-
- console = Console()
- console.print(panel_layout)
-
- def get_dataloader_stats(self) -> Tuple[int, Dict[str, int]]:
- """Get the number of inputs and annotations in a dataloader.
-
- Returns:
- local_inputs_count (int): total number of inputs in the dataloader
- local_annotations_dict (Dict[str, int]): total number of annotations in the dataloader
+ `module_dir`: relative path to the module directory
+ The directory must contain a `dataset.py` script and the data itself.
+ kwargs: keyword arguments to be passed to the dataloader class
+ Module Directory Structure:
+ ---------------------------
+ <folder_name>/
+ ├──__init__.py
+ ├──<Your local dir dataset>/
+ └──dataset.py
+ dataset.py must implement a class named following the convention,
+ <dataset_name>DataLoader and this class must inherit from base ClarifaiDataLoader()
  """
- from clarifai.constants.dataset import DATASET_UPLOAD_TASKS
-
- task = self.dataloader.task
- if task not in DATASET_UPLOAD_TASKS:
- raise UserError(
- "Invalid task, please use one of the following: {}".format(DATASET_UPLOAD_TASKS))
- local_inputs_count = len(self.dataloader)
- local_annotations_dict = dict(concepts=0, bboxes=0, polygons=0)
- for i in range(local_inputs_count):
- key, attr = [(k, v) for k, v in TASK_TO_ANNOTATION_TYPE.get(task).items()][0]
- local_annotations_dict[key] += len(getattr(self.dataloader[i], attr))
- return local_inputs_count, local_annotations_dict
-
- @staticmethod
- def get_dataset_version_stats(
- dataset_metrics_response: Type[MultiDatasetVersionMetricsGroupResponse]
- ) -> Tuple[Dict[str, int], Dict[str, int]]:
- """Parse the response from the server for the dataset version metrics groups.
- Args:
- dataset_metrics_response: The dataset version metrics response from the server.
-
- Returns:
- uploaded_inputs_dict (Dict[str, int]): The input statistics for the dataset.
- uploaded_annotations_dict (Dict[str, int]): The annotation statistics for the dataset.
- """
- dataset_statistics = []
- uploaded_inputs_dict = {}
- uploaded_annotations_dict = dict(concepts=0, bboxes=0, polygons=0)
- dict_response = MessageToDict(dataset_metrics_response)
-
- for data in dict_response["datasetVersionMetricsGroups"]:
- if isinstance(data["value"], str):
- if ("type" in data) and (data["type"] == "CONCEPT_ID"):
- data["metrics"].update({"Concept": data["value"]})
- data["metrics"].pop("regionLocationMatrix", None)
- dataset_statistics.append(data["metrics"])
+ module_path = os.path.join(module_dir, "dataset.py")
+ spec = importlib.util.spec_from_file_location("dataset", module_path)
+
+ if not spec:
+ raise ImportError(f"Module not found at {module_path}")
+
+ # Load the module using the spec
+ dataset = importlib.util.module_from_spec(spec)
+ # Execute the module to make its contents available
+ spec.loader.exec_module(dataset)
+
+ # get main module class
+ main_module_cls = None
+ for name, obj in dataset.__dict__.items():
+ if inspect.isclass(obj) and "DataLoader" in name:
+ main_module_cls = obj
  else:
- uploaded_inputs_dict[data["value"]] = data["metrics"]["inputsCount"]
-
- for ds in dataset_statistics:
- uploaded_annotations_dict["bboxes"] += int(ds["boundingBoxesCount"])
- uploaded_annotations_dict["concepts"] += int(ds["positiveInputTagsCount"])
- uploaded_annotations_dict["polygons"] += int(ds["polygonsCount"])
-
- return uploaded_inputs_dict, uploaded_annotations_dict
-
- def _create_layout(self):
- from rich.layout import Layout
- from rich.progress import BarColumn, Progress, TextColumn
+ continue
 
- # Create a Layout
- layout = Layout()
+ return main_module_cls(**kwargs)
 
- # Add a new task to the progress bar
- progress = Progress(
- "{task.description}",
- BarColumn(bar_width=100),
- TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
- )
 
- # Split the layout into top and bottom rows
- layout.split(Layout(name="Progress"), Layout(name="Tables"))
-
- # Add the progress bar to the top layout
- layout["Progress"].update(progress)
-
- # Split the bottom layout into two columns
- layout["Tables"].split_row(
- Layout(name="Local Dataset"),
- Layout(name="Uploaded Dataset"),
- )
-
- # Create a new layout for the panels
- panel_layout = Layout(size=18)
-
- # Split the panel layout into top and bottom rows
- panel_layout.split(Layout(name="Progress Panel", size=9), Layout(name="Tables Panel", size=9))
-
- return layout, panel_layout, progress
-
- def get_display_layout(self, local_dataset_dict: Dict[str, str],
- uploaded_dataset_dict: Dict[str, str]):
- """Create a layout for the display.
-
- Args:
- local_dataset_dict (dict): The local dataset stats info dict.
- uploaded_dataset_dict (dict): The uploaded dataset stats info dict.
-
- Returns:
- panel_layout (Layout): The panel layout for the display.
- """
- from rich.console import Group
- from rich.panel import Panel
-
- from clarifai.utils.logging import table_from_dict
-
- local_dataset_table = table_from_dict(
- [local_dataset_dict],
- column_names=["Inputs Count", "Annotations Count"],
- title="[cyan]Local Dataset")
- uploaded_dataset_table = table_from_dict(
- [uploaded_dataset_dict],
- column_names=["Inputs Count", "Annotations Count"],
- title="[cyan]Uploaded Dataset")
- dataset_info_table = table_from_dict(
- [self.dataset_info_dict], column_names=["dataset_id", "user_id", "app_id"])
-
- layout, panel_layout, progress = self._create_layout()
-
- # Add a new task to the progress bar
- progress.add_task(
- "[cyan]Inputs Progress:",
- completed=int(uploaded_dataset_dict["Inputs Count"]),
- total=int(local_dataset_dict["Inputs Count"]))
- progress.add_task(
- "[cyan]Annotations Progress:",
- completed=self.uploaded_annotations_count,
- total=self.local_annotations_count)
-
- # Add the tables to the respective layouts
- layout["Local Dataset"].update(local_dataset_table)
- layout["Uploaded Dataset"].update(uploaded_dataset_table)
-
- # Create a render group for the progress bar and the additional data
- progress_group = Group(progress, dataset_info_table)
-
- # Create a panel for the progress bar with a blue border and a suitable heading
- progress_panel = Panel(progress_group, title="[b] Dataset Upload Summary", border_style="blue")
-
- # Create a panel for the tables comparison with a blue border and a suitable heading
- tables_panel = Panel(
- layout["Tables"],
- title="[b] Dataset Metrics Comparison",
- border_style="blue",
- expand=False)
-
- # Add the panels to the respective layouts
- panel_layout["Progress Panel"].update(progress_panel)
- panel_layout["Tables Panel"].update(tables_panel)
-
- return panel_layout
+ class DisplayUploadStatus:
+ """Class to display dataset upload status."""
+
+ def __init__(
+ self,
+ dataloader: ClarifaiDataLoader,
+ dataset_metrics_response: Type[MultiDatasetVersionMetricsGroupResponse],
+ dataset_info_dict: Dict[str, str],
+ pre_upload_stats: Tuple[Dict[str, int], Dict[str, int]],
+ ) -> None:
+ """Initialize the class.
+ Args:
+ dataloader: ClarifaiDataLoader object
+ dataset_metrics_response: The dataset version metrics response from the server.
+ dataset_info_dict: The dataset info dictionary.
+ pre_upload_stats: The pre upload stats for the dataset.
+ """
+ self.dataloader = dataloader
+ self.dataset_metrics_response = dataset_metrics_response
+ self.dataset_info_dict = dataset_info_dict
+ self.pre_upload_stats = pre_upload_stats
+
+ self.display()
+
+ def display(self) -> None:
+ """Display the upload status."""
+ from rich.console import Console
+
+ local_inputs_count, local_annotations_dict = self.get_dataloader_stats()
+ uploaded_inputs_dict, uploaded_annotations_dict = self.get_dataset_version_stats(
+ self.dataset_metrics_response
+ )
+
+ # Subtract the pre upload stats from the uploaded stats
+ uploaded_inputs_dict = {
+ key: int(uploaded_inputs_dict[key]) - int(self.pre_upload_stats[0].get(key, 0))
+ for key in uploaded_inputs_dict
+ }
+ uploaded_annotations_dict = {
+ key: uploaded_annotations_dict[key] - self.pre_upload_stats[1].get(key, 0)
+ for key in uploaded_annotations_dict
+ }
+
+ self.local_annotations_count = sum(local_annotations_dict.values())
+ self.uploaded_annotations_count = sum(uploaded_annotations_dict.values())
+
+ local_dataset_dict = {
+ "Inputs Count": str(local_inputs_count),
+ "Annotations Count": str(local_annotations_dict),
+ }
+ uploaded_dataset_dict = {
+ "Inputs Count": str(uploaded_inputs_dict["total"]),
+ "Annotations Count": str(uploaded_annotations_dict),
+ }
+
+ panel_layout = self.get_display_layout(local_dataset_dict, uploaded_dataset_dict)
+
+ console = Console()
+ console.print(panel_layout)
+
+ def get_dataloader_stats(self) -> Tuple[int, Dict[str, int]]:
+ """Get the number of inputs and annotations in a dataloader.
+
+ Returns:
+ local_inputs_count (int): total number of inputs in the dataloader
+ local_annotations_dict (Dict[str, int]): total number of annotations in the dataloader
+ """
+ from clarifai.constants.dataset import DATASET_UPLOAD_TASKS
+
+ task = self.dataloader.task
+ if task not in DATASET_UPLOAD_TASKS:
+ raise UserError(
+ "Invalid task, please use one of the following: {}".format(DATASET_UPLOAD_TASKS)
+ )
+ local_inputs_count = len(self.dataloader)
+ local_annotations_dict = dict(concepts=0, bboxes=0, polygons=0)
+ for i in range(local_inputs_count):
+ key, attr = [(k, v) for k, v in TASK_TO_ANNOTATION_TYPE.get(task).items()][0]
+ local_annotations_dict[key] += len(getattr(self.dataloader[i], attr))
+ return local_inputs_count, local_annotations_dict
+
+ @staticmethod
+ def get_dataset_version_stats(
+ dataset_metrics_response: Type[MultiDatasetVersionMetricsGroupResponse],
+ ) -> Tuple[Dict[str, int], Dict[str, int]]:
+ """Parse the response from the server for the dataset version metrics groups.
+ Args:
+ dataset_metrics_response: The dataset version metrics response from the server.
+
+ Returns:
+ uploaded_inputs_dict (Dict[str, int]): The input statistics for the dataset.
+ uploaded_annotations_dict (Dict[str, int]): The annotation statistics for the dataset.
+ """
+ dataset_statistics = []
+ uploaded_inputs_dict = {}
+ uploaded_annotations_dict = dict(concepts=0, bboxes=0, polygons=0)
+ dict_response = MessageToDict(dataset_metrics_response)
+
+ for data in dict_response["datasetVersionMetricsGroups"]:
+ if isinstance(data["value"], str):
+ if ("type" in data) and (data["type"] == "CONCEPT_ID"):
+ data["metrics"].update({"Concept": data["value"]})
+ data["metrics"].pop("regionLocationMatrix", None)
+ dataset_statistics.append(data["metrics"])
+ else:
+ uploaded_inputs_dict[data["value"]] = data["metrics"]["inputsCount"]
+
+ for ds in dataset_statistics:
+ uploaded_annotations_dict["bboxes"] += int(ds["boundingBoxesCount"])
+ uploaded_annotations_dict["concepts"] += int(ds["positiveInputTagsCount"])
+ uploaded_annotations_dict["polygons"] += int(ds["polygonsCount"])
+
+ return uploaded_inputs_dict, uploaded_annotations_dict
+
+ def _create_layout(self):
+ from rich.layout import Layout
+ from rich.progress import BarColumn, Progress, TextColumn
+
+ # Create a Layout
+ layout = Layout()
+
+ # Add a new task to the progress bar
+ progress = Progress(
+ "{task.description}",
+ BarColumn(bar_width=100),
+ TextColumn("[progress.percentage]{task.percentage:>3.0f}%"),
+ )
+
+ # Split the layout into top and bottom rows
+ layout.split(Layout(name="Progress"), Layout(name="Tables"))
+
+ # Add the progress bar to the top layout
+ layout["Progress"].update(progress)
+
+ # Split the bottom layout into two columns
+ layout["Tables"].split_row(
+ Layout(name="Local Dataset"),
+ Layout(name="Uploaded Dataset"),
+ )
+
+ # Create a new layout for the panels
+ panel_layout = Layout(size=18)
+
+ # Split the panel layout into top and bottom rows
+ panel_layout.split(
+ Layout(name="Progress Panel", size=9), Layout(name="Tables Panel", size=9)
+ )
+
+ return layout, panel_layout, progress
+
+ def get_display_layout(
+ self, local_dataset_dict: Dict[str, str], uploaded_dataset_dict: Dict[str, str]
+ ):
+ """Create a layout for the display.
+
+ Args:
+ local_dataset_dict (dict): The local dataset stats info dict.
+ uploaded_dataset_dict (dict): The uploaded dataset stats info dict.
+
+ Returns:
+ panel_layout (Layout): The panel layout for the display.
+ """
+ from rich.console import Group
+ from rich.panel import Panel
+
+ from clarifai.utils.logging import table_from_dict
+
+ local_dataset_table = table_from_dict(
+ [local_dataset_dict],
+ column_names=["Inputs Count", "Annotations Count"],
+ title="[cyan]Local Dataset",
+ )
+ uploaded_dataset_table = table_from_dict(
+ [uploaded_dataset_dict],
+ column_names=["Inputs Count", "Annotations Count"],
+ title="[cyan]Uploaded Dataset",
+ )
+ dataset_info_table = table_from_dict(
+ [self.dataset_info_dict], column_names=["dataset_id", "user_id", "app_id"]
+ )
+
+ layout, panel_layout, progress = self._create_layout()
+
+ # Add a new task to the progress bar
+ progress.add_task(
+ "[cyan]Inputs Progress:",
+ completed=int(uploaded_dataset_dict["Inputs Count"]),
+ total=int(local_dataset_dict["Inputs Count"]),
+ )
+ progress.add_task(
+ "[cyan]Annotations Progress:",
+ completed=self.uploaded_annotations_count,
+ total=self.local_annotations_count,
+ )
+
+ # Add the tables to the respective layouts
+ layout["Local Dataset"].update(local_dataset_table)
+ layout["Uploaded Dataset"].update(uploaded_dataset_table)
+
+ # Create a render group for the progress bar and the additional data
+ progress_group = Group(progress, dataset_info_table)
+
+ # Create a panel for the progress bar with a blue border and a suitable heading
+ progress_panel = Panel(
+ progress_group, title="[b] Dataset Upload Summary", border_style="blue"
+ )
+
+ # Create a panel for the tables comparison with a blue border and a suitable heading
+ tables_panel = Panel(
+ layout["Tables"],
+ title="[b] Dataset Metrics Comparison",
+ border_style="blue",
+ expand=False,
+ )
+
+ # Add the panels to the respective layouts
+ panel_layout["Progress Panel"].update(progress_panel)
+ panel_layout["Tables Panel"].update(tables_panel)
+
+ return panel_layout
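The docstring carried over above spells out the contract that load_module_dataloader relies on: the module directory must hold a dataset.py defining a class whose name contains "DataLoader" and which inherits from ClarifaiDataLoader. A minimal sketch of such a dataset.py follows, assuming a text-classification loader; the class name, sample data, and folder layout are illustrative and not part of the package.

# dataset.py (hypothetical): a module that load_module_dataloader above could discover.
from clarifai.datasets.upload.base import ClarifaiDataLoader
from clarifai.datasets.upload.features import TextFeatures


class ExampleTextDataLoader(ClarifaiDataLoader):
    """Illustrative loader over an in-memory list of (text, label) pairs."""

    def __init__(self, samples=None):
        self.samples = samples or [("hello world", "greeting")]

    @property
    def task(self):
        # Should be one of clarifai.constants.dataset.DATASET_UPLOAD_TASKS.
        return "text_classification"

    def __len__(self):
        return len(self.samples)

    def __getitem__(self, index):
        text, label = self.samples[index]
        return TextFeatures(text=text, labels=[label])

With a module like this in place, load_module_dataloader("<folder_name>") would return an ExampleTextDataLoader instance, and DisplayUploadStatus.get_dataloader_stats would read its task, length, and per-item labels while reporting upload progress.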
clarifai/errors.py CHANGED
@@ -9,81 +9,82 @@ from clarifai.versions import CLIENT_VERSION, OS_VER, PYTHON_VERSION
 
 
  class TokenError(Exception):
- pass
+ pass
 
 
  class ApiError(Exception):
- """ API Server error """
+ """API Server error"""
 
- def __init__(self, resource: str, params: dict, method: str,
- response: requests.Response = None) -> None:
- self.resource = resource
- self.params = params
- self.method = method
- self.response = response
+ def __init__(
+ self, resource: str, params: dict, method: str, response: requests.Response = None
+ ) -> None:
+ self.resource = resource
+ self.params = params
+ self.method = method
+ self.response = response
 
- self.error_code = 'N/A'
- self.error_desc = 'N/A'
- self.error_details = 'N/A'
- response_json = 'N/A'
+ self.error_code = 'N/A'
+ self.error_desc = 'N/A'
+ self.error_details = 'N/A'
+ response_json = 'N/A'
 
- if response is not None:
- response_json_dict = MessageToDict(response)
+ if response is not None:
+ response_json_dict = MessageToDict(response)
 
- self.error_code = response_json_dict.get('status', {}).get('code', None)
- self.error_desc = response_json_dict.get('status', {}).get('description', None)
- self.error_details = response_json_dict.get('status', {}).get('details', None)
- response_json = json.dumps(response_json_dict['status'], indent=2)
+ self.error_code = response_json_dict.get('status', {}).get('code', None)
+ self.error_desc = response_json_dict.get('status', {}).get('description', None)
+ self.error_details = response_json_dict.get('status', {}).get('details', None)
+ response_json = json.dumps(response_json_dict['status'], indent=2)
 
- current_ts_str = str(time.time())
+ current_ts_str = str(time.time())
 
- msg = """%(method)s %(resource)s FAILED(%(time_ts)s). error_code: %(error_code)s, error_description: %(error_desc)s, error_details: %(error_details)s
+ msg = """%(method)s %(resource)s FAILED(%(time_ts)s). error_code: %(error_code)s, error_description: %(error_desc)s, error_details: %(error_details)s
  >> Python client %(client_version)s with Python %(python_version)s on %(os_version)s
  >> %(method)s %(resource)s
  >> REQUEST(%(time_ts)s) %(request)s
  >> RESPONSE(%(time_ts)s) %(response)s""" % {
- 'method': method,
- 'resource': resource,
- 'error_code': self.error_code,
- 'error_desc': self.error_desc,
- 'error_details': self.error_details,
- 'request': json.dumps(params, indent=2),
- 'response': response_json,
- 'time_ts': current_ts_str,
- 'client_version': CLIENT_VERSION,
- 'python_version': PYTHON_VERSION,
- 'os_version': OS_VER
- }
-
- super(ApiError, self).__init__(msg)
+ 'method': method,
+ 'resource': resource,
+ 'error_code': self.error_code,
+ 'error_desc': self.error_desc,
+ 'error_details': self.error_details,
+ 'request': json.dumps(params, indent=2),
+ 'response': response_json,
+ 'time_ts': current_ts_str,
+ 'client_version': CLIENT_VERSION,
+ 'python_version': PYTHON_VERSION,
+ 'os_version': OS_VER,
+ }
+
+ super(ApiError, self).__init__(msg)
 
 
  class ApiClientError(Exception):
- """ API Client Error """
+ """API Client Error"""
 
 
  class UserError(Exception):
- """ User Error """
+ """User Error"""
 
 
  class AuthError(Exception):
- """Raised when a client has missing or invalid authentication."""
+ """Raised when a client has missing or invalid authentication."""
 
 
  def _base_url(url: str) -> str:
- """
- Extracts the base URL from the url, which is everything before the 4th slash character.
- https://www.clarifai.com/v2/models/1/output -> https://www.clarifai.com/v2/
- """
- try:
- return url[:_find_nth(url, '/', 4) + 1]
- except Exception:
- return ''
+ """
+ Extracts the base URL from the url, which is everything before the 4th slash character.
+ https://www.clarifai.com/v2/models/1/output -> https://www.clarifai.com/v2/
+ """
+ try:
+ return url[: _find_nth(url, '/', 4) + 1]
+ except Exception:
+ return ''
 
 
  def _find_nth(haystack: str, needle: str, n: int) -> int:
- start = haystack.find(needle)
- while start >= 0 and n > 1:
- start = haystack.find(needle, start + len(needle))
- n -= 1
- return start
+ start = haystack.find(needle)
+ while start >= 0 and n > 1:
+ start = haystack.find(needle, start + len(needle))
+ n -= 1
+ return start
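The changes to the two helpers at the bottom of errors.py are formatting only, so their behavior is unchanged. A small self-contained check of what they compute, where the example URL comes from the docstring above and the expected values are worked out by hand:

# Standalone copy of the URL helpers from the diff above, plus illustrative checks.
def _find_nth(haystack: str, needle: str, n: int) -> int:
    # Index of the n-th occurrence of `needle`, or -1 if there are fewer than n.
    start = haystack.find(needle)
    while start >= 0 and n > 1:
        start = haystack.find(needle, start + len(needle))
        n -= 1
    return start


def _base_url(url: str) -> str:
    # Everything up to and including the 4th '/' of the URL, or '' on failure.
    try:
        return url[: _find_nth(url, '/', 4) + 1]
    except Exception:
        return ''


assert _find_nth("https://www.clarifai.com/v2/models/1/output", "/", 4) == 27
assert _base_url("https://www.clarifai.com/v2/models/1/output") == "https://www.clarifai.com/v2/"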