clarifai 10.0.0__py3-none-any.whl → 10.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. clarifai/client/base.py +8 -1
  2. clarifai/client/dataset.py +77 -21
  3. clarifai/client/input.py +6 -6
  4. clarifai/client/model.py +1 -1
  5. clarifai/client/module.py +1 -1
  6. clarifai/client/workflow.py +1 -1
  7. clarifai/datasets/upload/features.py +3 -0
  8. clarifai/datasets/upload/image.py +57 -26
  9. clarifai/datasets/upload/loaders/xview_detection.py +4 -0
  10. clarifai/datasets/upload/utils.py +23 -7
  11. clarifai/models/model_serving/README.md +113 -121
  12. clarifai/models/model_serving/__init__.py +2 -0
  13. clarifai/models/model_serving/cli/_utils.py +53 -0
  14. clarifai/models/model_serving/cli/base.py +14 -0
  15. clarifai/models/model_serving/cli/build.py +79 -0
  16. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  17. clarifai/models/model_serving/cli/create.py +171 -0
  18. clarifai/models/model_serving/cli/example_cli.py +34 -0
  19. clarifai/models/model_serving/cli/login.py +26 -0
  20. clarifai/models/model_serving/cli/upload.py +182 -0
  21. clarifai/models/model_serving/constants.py +20 -0
  22. clarifai/models/model_serving/docs/cli.md +150 -0
  23. clarifai/models/model_serving/docs/concepts.md +229 -0
  24. clarifai/models/model_serving/docs/dependencies.md +1 -1
  25. clarifai/models/model_serving/docs/inference_parameters.md +112 -107
  26. clarifai/models/model_serving/docs/model_types.md +16 -17
  27. clarifai/models/model_serving/model_config/__init__.py +4 -2
  28. clarifai/models/model_serving/model_config/base.py +369 -0
  29. clarifai/models/model_serving/model_config/config.py +219 -224
  30. clarifai/models/model_serving/model_config/inference_parameter.py +5 -0
  31. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -24
  32. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -18
  33. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -18
  34. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -18
  35. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -18
  36. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -18
  37. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -28
  38. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -18
  39. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -18
  40. clarifai/models/model_serving/{models → model_config}/output.py +8 -0
  41. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  42. clarifai/models/model_serving/model_config/{serializer.py → triton/serializer.py} +3 -1
  43. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  44. clarifai/models/model_serving/{models/model_types.py → model_config/triton/wrappers.py} +4 -4
  45. clarifai/models/model_serving/{models → repo_build}/__init__.py +2 -0
  46. clarifai/models/model_serving/repo_build/build.py +198 -0
  47. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  48. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  49. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  50. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  51. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  52. clarifai/models/model_serving/{models/pb_model.py → repo_build/static_files/triton/model.py} +15 -14
  53. clarifai/models/model_serving/utils.py +21 -0
  54. clarifai/rag/rag.py +45 -12
  55. clarifai/rag/utils.py +3 -2
  56. clarifai/utils/logging.py +7 -0
  57. clarifai/versions.py +1 -1
  58. {clarifai-10.0.0.dist-info → clarifai-10.1.0.dist-info}/METADATA +28 -5
  59. clarifai-10.1.0.dist-info/RECORD +114 -0
  60. clarifai-10.1.0.dist-info/entry_points.txt +2 -0
  61. clarifai/models/model_serving/cli/deploy_cli.py +0 -123
  62. clarifai/models/model_serving/cli/model_zip.py +0 -61
  63. clarifai/models/model_serving/cli/repository.py +0 -89
  64. clarifai/models/model_serving/docs/custom_config.md +0 -33
  65. clarifai/models/model_serving/docs/output.md +0 -28
  66. clarifai/models/model_serving/models/default_test.py +0 -281
  67. clarifai/models/model_serving/models/inference.py +0 -50
  68. clarifai/models/model_serving/models/test.py +0 -64
  69. clarifai/models/model_serving/pb_model_repository.py +0 -108
  70. clarifai-10.0.0.dist-info/RECORD +0 -103
  71. clarifai-10.0.0.dist-info/entry_points.txt +0 -4
  72. {clarifai-10.0.0.dist-info → clarifai-10.1.0.dist-info}/LICENSE +0 -0
  73. {clarifai-10.0.0.dist-info → clarifai-10.1.0.dist-info}/WHEEL +0 -0
  74. {clarifai-10.0.0.dist-info → clarifai-10.1.0.dist-info}/top_level.txt +0 -0
clarifai/utils/logging.py CHANGED
@@ -99,3 +99,10 @@ def get_logger(logger_level: Union[int, str] = logging.NOTSET,
 
   _configure_logger(name, logger_level)
   return logging.getLogger(name)
+
+
+def add_file_handler(logger: logging.Logger, file_path: str, log_level: str = 'WARNING') -> None:
+  """Add a file handler to the logger."""
+  file_handler = logging.FileHandler(file_path)
+  file_handler.setLevel(log_level)
+  logger.addHandler(file_handler)
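The new `add_file_handler` helper pairs with the module's existing `get_logger` factory. A minimal usage sketch, assuming `get_logger` accepts a `name` argument as the diff context suggests; the log file path is an arbitrary example:

```python
from clarifai.utils.logging import add_file_handler, get_logger

# Obtain the SDK logger, then also write WARNING-and-above records to a file.
logger = get_logger(logger_level="INFO", name="clarifai")
add_file_handler(logger, file_path="clarifai_sdk.log", log_level="WARNING")

logger.warning("This record is logged as usual and also written to clarifai_sdk.log")
```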
clarifai/versions.py CHANGED
@@ -1,6 +1,6 @@
 import os
 
-CLIENT_VERSION = "10.0.0"
+CLIENT_VERSION = "10.1.0"
 OS_VER = os.sys.platform
 PYTHON_VERSION = '.'.join(
     map(str, [os.sys.version_info.major, os.sys.version_info.minor, os.sys.version_info.micro]))
{clarifai-10.0.0.dist-info → clarifai-10.1.0.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: clarifai
-Version: 10.0.0
+Version: 10.1.0
 Summary: Clarifai Python SDK
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
@@ -30,7 +30,8 @@ Requires-Dist: rich (>=13.4.2)
 Requires-Dist: PyYAML (>=6.0.1)
 Requires-Dist: schema (>=0.7.5)
 Requires-Dist: Pillow (>=9.5.0)
-Requires-Dist: llama-index (>=0.9.27)
+Requires-Dist: inquirerpy (==0.3.4)
+Requires-Dist: llama-index-core (>=0.10.1)
 Requires-Dist: pypdf (>=3.17.4)
 Provides-Extra: all
 Requires-Dist: pycocotools (==2.0.6) ; extra == 'all'
@@ -85,6 +86,7 @@ This is the official Python client for interacting with our powerful [API](https
   * [Smart Image Search](#smart-image-search)
   * [Smart Text Search](#smart-text-search)
   * [Filters](#filters)
+* **[Retrieval Augmented Generation (RAG)](#retrieval-augmented-generation-rag)**
 * **[More Examples](#pushpin-more-examples)**
 
 
@@ -107,9 +109,10 @@ Install from Source:
 ```bash
 git clone https://github.com/Clarifai/clarifai-python.git
 cd clarifai-python
-python3 -m venv env
-source env/bin/activate
-pip3 install -r requirements.txt
+python3 -m venv .venv
+source .venv/bin/activate
+pip install -r requirements.txt
+python setup.py install
 ```
 
 
@@ -430,6 +433,26 @@ Input filters allows to filter by input_type, status of inputs and by inputs_dat
 results = search.query(filters=[{'input_types': ['image', 'text']}])
 ```
 
+## Retrieval Augmented Generation (RAG)
+
+You can setup and start your RAG pipeline in 4 lines of code. The setup method automatically creates a new app and the necessary components under the hood. By default it uses the [mistral-7B-Instruct](https://clarifai.com/mistralai/completion/models/mistral-7B-Instruct) model.
+
+```python
+from clarifai.rag import RAG
+
+rag_agent = RAG.setup(user_id="USER_ID")
+rag_agent.upload(folder_path="~/docs")
+rag_agent.chat(messages=[{"role":"human", "content":"What is Clarifai"}])
+```
+
+If you have previously run the setup method, you can instantiate the RAG class with the prompter workflow URL:
+
+```python
+from clarifai.rag import RAG
+
+rag_agent = RAG(workflow_url="WORKFLOW_URL")
+```
+
 ## :pushpin: More Examples
 
 See many more code examples in this [repo](https://github.com/Clarifai/examples).
clarifai-10.1.0.dist-info/RECORD ADDED
@@ -0,0 +1,114 @@
+clarifai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
+clarifai/versions.py,sha256=rOO7lQPI_tpNMmnkH5-F2q_L7JJQ_Ee0JGsUM5jGZtA,186
+clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,662
+clarifai/client/app.py,sha256=AWUngtwF6a0uS9DInf2arz5Aa-vbxt2Ce5WSNvwlAo0,26946
+clarifai/client/base.py,sha256=hv-eC0qAUMQlbAsSP_JgYfW0Z80C8_K5wo0bmMNoKQs,4963
+clarifai/client/dataset.py,sha256=Wq0dsRC8BfmS02NrMlnOyY2uaUYzzj3NALPjibfk-r8,23783
+clarifai/client/input.py,sha256=RDSGE2GdVqj31whRARFIpblm2_qbcbjW_SkvbUwTRio,38430
+clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
+clarifai/client/model.py,sha256=l2HP9qYuI8LpQRcj83f0HlSGTtbHQcUs3z6R-gLsyIg,24634
+clarifai/client/module.py,sha256=zO65sx7QAaziLIeqwWMR-rxrW9mcm2BZX8oXVyJx5WA,3781
+clarifai/client/runner.py,sha256=nP6QKs8Hy_52skr4gBNAfmPaTcYg20qzZDXnM2IxGlM,9679
+clarifai/client/search.py,sha256=pqX3BJmL8V1RKIGuNkbciDNYGoMwJj3k84B9OvpKl10,10555
+clarifai/client/user.py,sha256=6sOoHiBSHKz6zfEh4cjBbUe5CgmYs96RgHdcMmPoKys,9914
+clarifai/client/workflow.py,sha256=yOS9XwlO-zM6aHuM4aYGBHG1zYL0igXeZLJTkCcR3l4,9998
+clarifai/client/auth/__init__.py,sha256=7EwR0NrozkAUwpUnCsqXvE_p0wqx_SelXlSpKShKJK0,136
+clarifai/client/auth/helper.py,sha256=3lCKo24ZIOlcSh50juJh3ZDagOo_pxEKyoPjWUokYoA,13450
+clarifai/client/auth/register.py,sha256=2CMdBsoVLoTfjyksE6j7BM2tiEc73WKYvxnwDDgNn1k,536
+clarifai/client/auth/stub.py,sha256=KIzJZ8aRB1RzXJeWHDAx19HNdBsblPPHwYLfAkgI3rY,3779
+clarifai/constants/dataset.py,sha256=2QlHF0NMXfAdFlOpEzkNYVZcxSL-dIxq-ZsY_LsIPBA,499
+clarifai/constants/model.py,sha256=LsMkLVkuBpfS4j4yDW9M4O7HxzRpIuSo9qU5T8Wg2Co,217
+clarifai/constants/rag.py,sha256=WcHwToUVIK9ItAhDefaSohQHCLNeR55PSjZ0BFnoZ3U,28
+clarifai/constants/search.py,sha256=_g3S-JEvuygiFfMVK3cl4Ry9erZpt8Zo4ilXL2i3DAE,52
+clarifai/constants/workflow.py,sha256=cECq1xdvf44MCdtK2AbkiuuwhyL-6OWZdQfYbsLKy_o,33
+clarifai/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/datasets/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/datasets/export/inputs_annotations.py,sha256=z7kmU9K5m9F5u3iEyCnuKk8Bb97kqGaixm8vJZYT554,9325
+clarifai/datasets/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/datasets/upload/base.py,sha256=IP4sdBRfThk2l0W1rDWciFrAJnKwVsM-gu4zEslJ2_E,2198
+clarifai/datasets/upload/features.py,sha256=KeVxO36WrL3uqWCN_-aex1k28C5ZRTm6G8SmTtus6KA,1571
+clarifai/datasets/upload/image.py,sha256=Dlt0RM9qWSi4NcbVM1EjS1sp8zfIO3xWZS6TSSLAbVY,7481
+clarifai/datasets/upload/text.py,sha256=ek29V18x5LqmHqc-nmAljQcud9uRjZx8IV_lDX78zsY,1980
+clarifai/datasets/upload/utils.py,sha256=h7mtN9FZXhQQbf47EXczgb-NTY2uOE9AJlE9u4-hDwI,9627
+clarifai/datasets/upload/loaders/README.md,sha256=ag-3lXuvsKTZapvnqBv824rMrVeX0i9U5v1oqhdhvoo,3038
+clarifai/datasets/upload/loaders/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/datasets/upload/loaders/coco_captions.py,sha256=t-IaIXukDk1mFdeeqdwe0hLrBLuaF-cZWl2aumGUAls,1297
+clarifai/datasets/upload/loaders/coco_detection.py,sha256=dBYl2a1D7e-N1heXbFK0bImJAuq_lPQ8nxZMa1zq-Ts,2612
+clarifai/datasets/upload/loaders/coco_segmentation.py,sha256=yu9HBHYdKCllF9-6SdQ_2CaKGskE4DdeqCin7zNTN1c,3628
+clarifai/datasets/upload/loaders/imagenet_classification.py,sha256=LuylazxpI5V8fAPGCUxDirGpYMfxzRxix-MEWaCvwxI,1895
+clarifai/datasets/upload/loaders/xview_detection.py,sha256=bSdmEA_YC-uHl-5uSy3HNnxGVAi9I4N_wjOS0hlbW34,6071
+clarifai/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/models/api.py,sha256=d3FQQlG0mNDLrfEvchqaVcq4Tgb_TqryNnJtwp3c7sE,10961
+clarifai/models/model_serving/README.md,sha256=Ln8hsyE38J3yiLZruKHjU_hdq9CjzzbDUAO28Xyw1dQ,4060
+clarifai/models/model_serving/__init__.py,sha256=78fiK9LvdGvpMxICmZWqSIyS6BFATjW2s5R6_GgtbPA,645
+clarifai/models/model_serving/constants.py,sha256=uoi8TqEFkdsHhSZu90HOO3R0BmPC3G0z9qA5ER-5H7w,688
+clarifai/models/model_serving/utils.py,sha256=MXeOHsNHiwx9qsRoX-FzBO2Tmbgo_IVwTf3EUmgdtSQ,524
+clarifai/models/model_serving/cli/__init__.py,sha256=Nls28G-fedNw2oQZIkPQSN__TgjJXbG9RDzzuHIM0VI,575
+clarifai/models/model_serving/cli/_utils.py,sha256=oDd885kwX7u5vf-8dssJFyrR3lEof8x4BXt32egaoKA,1722
+clarifai/models/model_serving/cli/base.py,sha256=k4ARNU1koNzGAi9ach6Vpk7hpISZySiYHyKjkBLuHLg,283
+clarifai/models/model_serving/cli/build.py,sha256=Bfa-PuLIGcreiBr_72XKqCS_IlVJdzJudZkINmX082Y,2774
+clarifai/models/model_serving/cli/clarifai_clis.py,sha256=sGDDj7MrlU3goWLQm4H9dCf4lPD2Ojx50_jdIoxb5QM,663
+clarifai/models/model_serving/cli/create.py,sha256=wtKcVi8XSPN-Fx0RrSUxEwH1hm5TbZ_FrCEMIS9yszM,5598
+clarifai/models/model_serving/cli/example_cli.py,sha256=tCm0J4EI0kuuSRhEiPTuraSA-bUYwtEFEHcL1eOXzRI,1039
+clarifai/models/model_serving/cli/login.py,sha256=TYRQALJZUhNvtx2VcChO0y41YXs8-yP9BrShYb9tcOM,743
+clarifai/models/model_serving/cli/upload.py,sha256=8wYviCTLZYjnXhGykGlm0HhjBd_x5PKp7IKiB8BeOGc,6871
+clarifai/models/model_serving/docs/cli.md,sha256=AM45FZag3520ri4Terb0t7_MmLTs7gjHXAf7TYVZjZk,3942
+clarifai/models/model_serving/docs/concepts.md,sha256=ppQADibKQInf9JpfcH7wIpcMndTZ3618or5yzMhGNOE,9376
+clarifai/models/model_serving/docs/dependencies.md,sha256=apwg_IxDBzovtQYXRpWMU9pUqdf0VaS10yMVOYYXhoc,728
+clarifai/models/model_serving/docs/inference_parameters.md,sha256=EFBQs3OGQNH512zoLJKMfFD6WXE_Tzt_Uvts877VvpQ,4111
+clarifai/models/model_serving/docs/model_types.md,sha256=3sALugeBTMspEnlPNWXI8xtWCxjMDQYjrAji_jgqHVo,1013
+clarifai/models/model_serving/model_config/__init__.py,sha256=MLnCl4U2UlL8hkvKbKifFX2nKRjVN63687-gxiKf8g4,734
+clarifai/models/model_serving/model_config/base.py,sha256=Jow6cFvREtWRaaXw1hobWJks0uYsOi9oL973ZPEfIkk,14636
+clarifai/models/model_serving/model_config/config.py,sha256=EWkPcui370QEYJAjlzuLupLlaZF2BgFbK0Jhx_JDHnk,10188
+clarifai/models/model_serving/model_config/inference_parameter.py,sha256=fDPRkwsntaGZWQWOiCW8x0tcyHPeSCYZwBZoZb2oBzw,3924
+clarifai/models/model_serving/model_config/output.py,sha256=uyXY-B9mmoe8lizTpYEBRYI1KDNQh3ihEiEB4Ne65uc,4634
+clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml,sha256=4wFQ2R8PiJrXR_8AEgUDD-22gY9sK93y9r68mSOOVnw,541
+clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml,sha256=0hicyQM-R2Za62RaBexdNCkHBDdacwMRVAL8Yk_sVzs,421
+clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml,sha256=MEnVsO3-SAOFSW7-b0BOSxgUNxdhXfmE98hXstBt104,395
+clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml,sha256=FPO9ic0R_mcFa3nIGon9z3negy1q6LsPRNmJ-wqGhyw,383
+clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml,sha256=7u_0kdiR2iEuXTKHtErUzZZ8ghUdep-RuWmJd9i8BdY,371
+clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml,sha256=UDq-VtnnnhuI7NCJOYM19kFvcMS0aOvDDMSblPk5iYY,468
+clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml,sha256=cJsalUTzXclXpgzH9CutpWQqseJNg9FrI7WjU3wpfuQ,852
+clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml,sha256=OQYdrY81rD3WNooHRkOiQASvL3XfGG9GGzT61jEsrT8,406
+clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml,sha256=mQLwA1JnnvWZwp26lVxzwfhp1GS7hH9yHh7mtOUt9rY,474
+clarifai/models/model_serving/model_config/triton/__init__.py,sha256=uJUjpRauhVp6_9sN5DRQi7bwIKEtHPKyQqcCVj6Aj2g,719
+clarifai/models/model_serving/model_config/triton/serializer.py,sha256=eYwXfaJkeXMaiQq_EDG4vWOCc1CKfnC_U6dSp2Urak0,4278
+clarifai/models/model_serving/model_config/triton/triton_config.py,sha256=mDZafUByvEgM1vd0QZL8nM-cOCqeR-06iOC2T6x8hr4,4696
+clarifai/models/model_serving/model_config/triton/wrappers.py,sha256=-O8t2AEJXvqJlUNtKtr8CUlxLjheV2GfBtM0sB_B1v0,8660
+clarifai/models/model_serving/repo_build/__init__.py,sha256=jFb0RNG4Jh63TH35_Urv0EyNXVMW8FEC2NVHXhlbvqg,673
+clarifai/models/model_serving/repo_build/build.py,sha256=IlJTjt5YI1alAGv1Fw3kPZeh3yqi45R20rKbWN9vV1s,7195
+clarifai/models/model_serving/repo_build/static_files/_requirements.txt,sha256=lIXMfxC4BP6QA5hraObPOwUS3PK9F2mA0Gf8KvlijQE,34
+clarifai/models/model_serving/repo_build/static_files/base_test.py,sha256=wuwoXk37bgDaLmE-h4KfMoz0Qvr6B-InLzSORYzwF3A,6780
+clarifai/models/model_serving/repo_build/static_files/inference.py,sha256=TejkXZw43mcZD-M9TkfuqMuABz_cliJgf53_Teodtf0,721
+clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml,sha256=VOFSSb7D_CgRRcqi-plaCH-6hoFO8NAGDNXVSOJGylo,678
+clarifai/models/model_serving/repo_build/static_files/test.py,sha256=GunBqWgTyo0aF5W9ckKz55tGS-wkL9S9TRfytIjB7Eo,1505
+clarifai/models/model_serving/repo_build/static_files/triton/model.py,sha256=l9lkwyeXw9H_K4Om9dGcuylnj4hAlzohspUZkSnQ7Qg,2429
+clarifai/modules/README.md,sha256=mx8pVx6cPp-pP4LcFPT_nX3ngGmhygVK0WiXeD3cbIo,367
+clarifai/modules/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/modules/css.py,sha256=kadCEunmyh5h2yf0-4aysE3ZcZ6qaQcxuAgDXS96yF8,2020
+clarifai/modules/pages.py,sha256=iOoM3RNRMgXlV0qBqcdQofxoXo2RuRQh0h9c9BIS0-I,1383
+clarifai/modules/style.css,sha256=j7FNPZVhLPj35vvBksAJ90RuX5sLuqzDR5iM2WIEhiA,6073
+clarifai/rag/__init__.py,sha256=wu3PzAzo7uqgrEzuaC9lY_3gj1HFiR3GU3elZIKTT5g,40
+clarifai/rag/rag.py,sha256=N4nhjFRqV2bCPE0W4utsFe8F_5ajhHOF95LRbDLUpC8,11661
+clarifai/rag/utils.py,sha256=1FtQ3_URdtMQpHZdGeDLGibhvcAvKQ8J7ctecATLEhs,3435
+clarifai/runners/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/runners/example.py,sha256=V0Nc52JkhCm97oaWzKVg71g50M1ltxI9jyPMo6tKU6E,1302
+clarifai/runners/example_llama2.py,sha256=WMGTqv3v9t3ID1rjW9BTLMkIuvyTESL6xHcOO6A220Y,2712
+clarifai/schema/search.py,sha256=JjTi8ammJgZZ2OGl4K6tIA4zEJ1Fr2ASZARXavI1j5c,2448
+clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
+clarifai/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/utils/logging.py,sha256=F19UmdeJKwIy8Nqo8o0hegf-qJGqzqtQ5Bi0Rz2NP4Q,3582
+clarifai/utils/misc.py,sha256=cC_j0eEsJ8bfnj0oRd2z-Rms1mQbAfLwrSs07hwQuCE,1420
+clarifai/utils/model_train.py,sha256=v4-bsPOOi-jxzwDxdNf2exaWPEpKD7BYcc6w0kMds4o,7832
+clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
+clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
+clarifai/workflows/validate.py,sha256=iCEKBTtB-57uE3LVU7D4AI9BRHxIxahk3U1Ro08HP-o,2535
+clarifai-10.1.0.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+clarifai-10.1.0.dist-info/METADATA,sha256=zWB-J2f4iI9LDEc9mUTq-xK2TLJs7WzFiLQwnnNtS1g,17381
+clarifai-10.1.0.dist-info/WHEEL,sha256=pkctZYzUS4AYVn6dJ-7367OJZivF2e8RA9b_ZBjif18,92
+clarifai-10.1.0.dist-info/entry_points.txt,sha256=qZOr_MIPG0dBBE1zringDJS_wXNGTAA_SQ-zcbmDHOw,82
+clarifai-10.1.0.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+clarifai-10.1.0.dist-info/RECORD,,
clarifai-10.1.0.dist-info/entry_points.txt ADDED
@@ -0,0 +1,2 @@
+[console_scripts]
+clarifai = clarifai.models.model_serving.cli.clarifai_clis:main
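The new entry point registers a single `clarifai` console script that dispatches to `clarifai.models.model_serving.cli.clarifai_clis:main`. A hedged sketch of invoking it after installing the wheel; the `--help` flag is an assumption, following the argparse convention of the CLIs this release removes:

```bash
pip install clarifai==10.1.0
# The wheel now exposes one `clarifai` command backed by clarifai_clis:main.
clarifai --help
```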
clarifai/models/model_serving/cli/deploy_cli.py DELETED
@@ -1,123 +0,0 @@
-# Copyright 2023 Clarifai, Inc.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Commandline interface for model upload utils."""
-import argparse
-
-from clarifai.client.auth.helper import ClarifaiAuthHelper
-from clarifai.models.api import Models
-from clarifai.models.model_serving.model_config import MODEL_TYPES, get_model_config
-from clarifai.models.model_serving.model_config.inference_parameter import InferParamManager
-
-
-def deploy(model_url,
-           model_id: str = None,
-           model_type: str = None,
-           desc: str = "",
-           update_version: bool = False,
-           inference_params_file: str = ""):
-  # init Auth from env vars
-  auth = ClarifaiAuthHelper.from_env()
-  # init api
-  model_api = Models(auth)
-
-  # parsing model name/type.
-  # if filename having this format: <model_id>_<model-type>
-  # e.i yolov5s_coco_visual-dectector
-  # else user has to input model_type and model_id
-  zip_filename = model_url.split('/')[-1]
-  zip_filename = zip_filename.split('.')[0]
-
-  def _parse_name(name):
-    *id_, type_ = name.split('_')
-    return "_".join(id_), type_
-
-  # parse model_id
-  if not model_id and "_" in zip_filename:
-    model_id = _parse_name(zip_filename)[0]
-  assert model_id, "Can not parse model_id from url, please input it directly"
-  # parse model_type
-  if not model_type and "_" in zip_filename:
-    model_type = _parse_name(zip_filename)[-1]
-  assert model_type, "Can not parse model_type from url, please input it directly"
-  # key map
-  assert model_type in MODEL_TYPES, f"model_type should be one of {MODEL_TYPES}"
-  clarifai_key_map = get_model_config(model_type=model_type).field_maps
-  # inference parameters
-  inference_parameters = InferParamManager(json_path=inference_params_file).get_list_params()
-
-  # if updating new version of existing model
-  if update_version:
-    resp = model_api.post_model_version(
-        model_id=model_id,
-        model_zip_url=model_url,
-        input=clarifai_key_map.input_fields_map,
-        outputs=clarifai_key_map.output_fields_map,
-        param_specs=inference_parameters)
-  # creating new model
-  else:
-    # post model
-    resp = model_api.upload_model(
-        model_id=model_id,
-        model_zip_url=model_url,
-        model_type=model_type,
-        input=clarifai_key_map.input_fields_map,
-        outputs=clarifai_key_map.output_fields_map,
-        description=desc,
-        param_specs=inference_parameters)
-  # response
-  if resp["status"]["code"] != "SUCCESS":
-    raise Exception("Post models failed, details: {}, {}".format(resp["status"]["description"],
-                                                                 resp["status"]["details"]))
-  else:
-    print("Success!")
-    print(f'Model version: {resp["model"]["model_version"]["id"]}')
-
-
-def main():
-  parser = argparse.ArgumentParser(description=__doc__)
-  # args
-  parser.add_argument("--url", type=str, required=True, help="Direct download url of zip file")
-  parser.add_argument("--model_id", type=str, required=False, default="", help="Custom model id.")
-  parser.add_argument(
-      "--model_type",
-      type=str,
-      required=False,
-      choices=MODEL_TYPES,
-      default="",
-      help="Clarifai model type")
-  parser.add_argument(
-      "--desc", type=str, required=False, default="", help="Short desccription of model")
-  parser.add_argument(
-      "--update_version",
-      action="store_true",
-      required=False,
-      help="Update exist model with new version")
-
-  parser.add_argument(
-      "--infer_param",
-      required=False,
-      default="",
-      help="Path to json file contains inference parameters")
-
-  args = parser.parse_args()
-  deploy(
-      model_url=args.url,
-      model_id=args.model_id,
-      desc=args.desc,
-      model_type=args.model_type,
-      update_version=args.update_version,
-      inference_params_file=args.infer_param)
-
-
-if __name__ == "__main__":
-  main()
clarifai/models/model_serving/cli/model_zip.py DELETED
@@ -1,61 +0,0 @@
-# Copyright 2023 Clarifai, Inc.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Triton model zip commandline interface."""
-
-import argparse
-import zipfile
-from pathlib import Path
-from typing import Union
-
-
-def zip_dir(triton_repository_dir: Union[Path, str], zip_filename: Union[Path, str]):
-  """
-  Generate triton model repository zip file for upload.
-  Args:
-  -----
-  triton_repository_dir: Directory of triton model respository to be zipped
-  zip_filename: Triton model repository zip filename
-
-  Returns:
-  --------
-  None
-  """
-  # Convert to Path object
-  dir = Path(triton_repository_dir)
-
-  with zipfile.ZipFile(zip_filename, "w", zipfile.ZIP_DEFLATED) as zip_file:
-    for entry in dir.rglob("*"):
-      zip_file.write(entry, entry.relative_to(dir))
-
-
-def main():
-  """Triton model zip cli."""
-  parser = argparse.ArgumentParser(__doc__)
-  parser.add_argument(
-      "--triton_model_repository",
-      type=str,
-      required=True,
-      help="Path to the triton model repository to zip.")
-  parser.add_argument(
-      "--zipfile_name",
-      type=str,
-      required=True,
-      help="Name of the zipfile to be created. \
-      <model_name>_<model_type> is the recommended naming convention.e.g. yolov5_visual-detector.zip"
-  )
-  args = parser.parse_args()
-  zip_dir(args.triton_model_repository, args.zipfile_name)
-
-
-if __name__ == "__main__":
-  main()
clarifai/models/model_serving/cli/repository.py DELETED
@@ -1,89 +0,0 @@
-# Copyright 2023 Clarifai, Inc.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Triton model repository generation commandline interface."""
-
-import argparse
-
-from ..constants import MAX_HW_DIM
-from ..model_config import MODEL_TYPES, get_model_config
-from ..pb_model_repository import TritonModelRepository
-
-
-def dims_type(shape_string: str):
-  """Read list string from cli and convert values to a list of integers."""
-  shape_string = shape_string.replace("[", "").replace("]", "")
-  shapes = list(map(int, shape_string.split(",")))
-  return shapes
-
-
-def model_upload_init():
-  """
-  Clarifai triton model upload commandline tool.
-  """
-  parser = argparse.ArgumentParser(description=__doc__)
-  # TritonModelConfig args
-  parser.add_argument("--model_name", type=str, required=True, help="Inference Model Name")
-  parser.add_argument(
-      "--model_version",
-      type=str,
-      default="1",
-      required=False,
-      help="Triton inference model version name. 1 stands for version 1. \
-      Leave as default value (Recommended).")
-  parser.add_argument(
-      "--model_type",
-      type=str,
-      choices=MODEL_TYPES,
-      required=True,
-      help=f"Clarifai supported model types.\n Model-types-map: {MODEL_TYPES}",
-  )
-  parser.add_argument(
-      "--image_shape",
-      type=dims_type,
-      default="[-1, -1]",
-      required=False,
-      help="(H, W) dims for models with an image input type. H and W each have a max value of 1024",
-  )
-  parser.add_argument(
-      "--repo_dir",
-      type=str,
-      default=".",
-      required=True,
-      help="Directory to create triton repository.")
-  parser.add_argument("--max_bs", type=int, default=1, required=False, help="Max batch size")
-
-  args = parser.parse_args()
-
-  if len(args.image_shape) != 2:
-    raise ValueError(
-        f"image_shape takes 2 values, Height and Width. Got {len(args.image_shape)} values instead."
-    )
-
-  if args.image_shape[0] > MAX_HW_DIM or args.image_shape[1] > MAX_HW_DIM:
-    raise ValueError(
-        f"H and W each have a maximum value of 1024. Got H: {args.image_shape[0]}, W: {args.image_shape[1]}"
-    )
-
-  model_config = get_model_config(args.model_type).make_triton_model_config(
-      model_name=args.model_name,
-      model_version="1",
-      image_shape=args.image_shape,
-      max_batch_size=args.max_bs,
-  )
-
-  triton_repo = TritonModelRepository(model_config)
-  triton_repo.build_repository(args.repo_dir)
-
-
-if __name__ == "__main__":
-  model_upload_init()
clarifai/models/model_serving/docs/custom_config.md DELETED
@@ -1,33 +0,0 @@
-## Custom Triton Configurations
-
-The commandline triton model repository generation utils do work with default values for the various triton configurations but a few of these config values can be modified to suit different task specific needs.
-
-* For vision models for instance, different input shapes for the `Height (H)` and `Width (W)` are supported and can be set via the commandline too.i.e.
-  ```console
-  $ clarifai-model-upload-init --model_name <Your model name> \
-      --model_type <select model type from available ones> \
-      --image_shape "H, W"
-      --repo_dir <directory in which to create your model repository>
-  ```
-  `H` and `W` each have a maximum value of 1024.
-  `--image_shape` accepts both `"H, W"` and `"[H, W]"` format input.
-
-
-## Generating the triton model repository without the commandline
-
-The triton model repository can be generated via a python script specifying the same values as required in the commandline. Below is a sample of how the code would be structured with `visual_classifier`.
-
-```python
-from clarifai.models.model_serving.model_config import get_model_config, ModelTypes, TritonModelConfig
-from clarifai.models.model_serving.pb_model_repository import TritonModelRepository
-
-model_type = ModelTypes.visual_classifier
-model_config: TritonModelConfig = get_model_config(model_type).make_triton_model_config(
-    model_name="<model_name>",
-    model_version="1",
-    image_shape=<[H,W]>, # 0 < [H,W] <= 1024
-)
-
-triton_repo = TritonModelRepository(model_config)
-triton_repo.build_repository("<dir>")
-```
clarifai/models/model_serving/docs/output.md DELETED
@@ -1,28 +0,0 @@
-## Clarifai Model Prediction Output Formats.
-
-Different models return different types of predictions and Clarifai output dataclasses aim at standardizing the output formats per model type for compatibility with the Clarifai API.
-
-Each machine learning modality supported by the Clarifai API has a predefined dataclass output format with all attributes being of numpy ndarray type.
-
-## Supported Formats
-
-Usage:
-```python
-from clarifai.models.model_serving.models.output import VisualDetectorOutput
-```
-| Output Type (dataclass) | Attributes | Attribute Data Type| Attribute Shapes | Description |
-| --- | --- | --- | --- | --- |
-| [VisualDetectorOutput](../models/output.py) | `predicted_bboxes` | float32 | [-1, 4] | A 2D detected bounding boxes array of any length with each element array having a length of exactly 4. All bbox coordinates MUST be normalized between 0 & 1. |
-| | `predicted_labels` | int32 | [-1, 1] | A 2D detected labels array of length equal to that of predicted_bboxes with each element array having a length of exactly 1.
-| | `predicted_scores` | float32 | [-1, 1] | A 2D detection scores array of length equal to that of predicted_bboxes & predicted_labels with each element array having a length of exactly 1.
-| | | | | |
-| [ClassifierOutput](../models/output.py) | `predicted_scores` | float32 | [-1] | The softmax of the model's predictions. The index of each predicted probability as returned by the model must correspond to the label index in the labels.txt file |
-| | | | | |
-| [TextOutput](../models/output.py) | `predicted_text` | string | [1] | Predicted text from a model |
-| | | | | |
-| [EmbeddingOutput](../models/output.py) | `embedding_vector` | float32 | [-1] | The embedding vector (image or text embedding) returned by a model |
-| | | | | |
-| [MasksOutput](../models/output.py) | `predicted_mask` | int64 | [-1, -1] | The model predicted image mask. The predicted class indices must be assigned to the corresponding image pixels in the mask where that class is predicted by the model. |
-| | | | | |
-| [ImageOutput](../models/output.py) | `image` | unint8 | [-1, -1, 3] | The model predicted/generated image |
-| | | | | |