clarifai 10.11.1__py3-none-any.whl → 10.11.2rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (159)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  4. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  5. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  6. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  11. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  12. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  13. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  14. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  15. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  16. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  17. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  18. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  19. clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  21. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  23. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  24. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  25. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  26. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  27. clarifai/client/dataset.py +4 -4
  28. clarifai/client/model.py +94 -13
  29. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  30. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  31. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  32. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  33. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  34. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  35. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  36. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  37. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  38. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  39. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  40. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  41. clarifai/datasets/upload/features.py +1 -1
  42. clarifai/datasets/upload/multimodal.py +2 -1
  43. clarifai/datasets/upload/text.py +3 -2
  44. clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
  45. clarifai/models/model_serving/README.md +158 -0
  46. clarifai/models/model_serving/__init__.py +14 -0
  47. clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
  48. clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
  49. clarifai/models/model_serving/cli/__init__.py +12 -0
  50. clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  51. clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
  52. clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
  53. clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
  54. clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
  55. clarifai/models/model_serving/cli/_utils.py +53 -0
  56. clarifai/models/model_serving/cli/base.py +14 -0
  57. clarifai/models/model_serving/cli/build.py +79 -0
  58. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  59. clarifai/models/model_serving/cli/create.py +171 -0
  60. clarifai/models/model_serving/cli/example_cli.py +34 -0
  61. clarifai/models/model_serving/cli/login.py +26 -0
  62. clarifai/models/model_serving/cli/upload.py +183 -0
  63. clarifai/models/model_serving/constants.py +21 -0
  64. clarifai/models/model_serving/docs/cli.md +161 -0
  65. clarifai/models/model_serving/docs/concepts.md +229 -0
  66. clarifai/models/model_serving/docs/dependencies.md +11 -0
  67. clarifai/models/model_serving/docs/inference_parameters.md +139 -0
  68. clarifai/models/model_serving/docs/model_types.md +19 -0
  69. clarifai/models/model_serving/model_config/__init__.py +16 -0
  70. clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
  71. clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
  72. clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
  73. clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
  74. clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
  75. clarifai/models/model_serving/model_config/base.py +369 -0
  76. clarifai/models/model_serving/model_config/config.py +312 -0
  77. clarifai/models/model_serving/model_config/inference_parameter.py +129 -0
  78. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -0
  79. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -0
  80. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -0
  81. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -0
  82. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -0
  83. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -0
  84. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -0
  85. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -0
  86. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -0
  87. clarifai/models/model_serving/model_config/output.py +133 -0
  88. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  89. clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
  90. clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
  91. clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
  92. clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
  93. clarifai/models/model_serving/model_config/triton/serializer.py +136 -0
  94. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  95. clarifai/models/model_serving/model_config/triton/wrappers.py +281 -0
  96. clarifai/models/model_serving/repo_build/__init__.py +14 -0
  97. clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
  98. clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
  99. clarifai/models/model_serving/repo_build/build.py +198 -0
  100. clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
  101. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  102. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  103. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  104. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  105. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  106. clarifai/models/model_serving/repo_build/static_files/triton/model.py +75 -0
  107. clarifai/models/model_serving/utils.py +31 -0
  108. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  109. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  110. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  111. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  112. clarifai/runners/__pycache__/server.cpython-310.pyc +0 -0
  113. clarifai/runners/deepgram_live_transcribe.py +98 -0
  114. clarifai/runners/deepgram_live_transcribe.py~ +98 -0
  115. clarifai/runners/deepgram_runner.py +131 -0
  116. clarifai/runners/deepgram_runner.py~ +130 -0
  117. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
  118. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +79 -0
  119. clarifai/runners/example_llama2.py~ +72 -0
  120. clarifai/runners/matt_example.py +89 -0
  121. clarifai/runners/matt_example.py~ +87 -0
  122. clarifai/runners/matt_llm_example.py +129 -0
  123. clarifai/runners/matt_llm_example.py~ +128 -0
  124. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  125. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  126. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  127. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  128. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  129. clarifai/runners/models/__pycache__/model_servicer.cpython-310.pyc +0 -0
  130. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  131. clarifai/runners/models/model_upload.py +75 -10
  132. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  133. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  134. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  135. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  136. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  137. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  138. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  139. clarifai/runners/utils/const.py +27 -25
  140. clarifai/runners/utils/loader.py +65 -17
  141. clarifai/runners/utils/logging.py +6 -0
  142. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  143. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  144. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  145. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  146. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  147. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  148. clarifai/utils/logging.py +7 -0
  149. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  150. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  151. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  152. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  153. {clarifai-10.11.1.dist-info → clarifai-10.11.2rc2.dist-info}/METADATA +15 -15
  154. clarifai-10.11.2rc2.dist-info/RECORD +242 -0
  155. {clarifai-10.11.1.dist-info → clarifai-10.11.2rc2.dist-info}/WHEEL +1 -1
  156. clarifai-10.11.1.dist-info/RECORD +0 -100
  157. {clarifai-10.11.1.dist-info → clarifai-10.11.2rc2.dist-info}/LICENSE +0 -0
  158. {clarifai-10.11.1.dist-info → clarifai-10.11.2rc2.dist-info}/entry_points.txt +0 -0
  159. {clarifai-10.11.1.dist-info → clarifai-10.11.2rc2.dist-info}/top_level.txt +0 -0
clarifai/__init__.py CHANGED
@@ -1 +1 @@
- __version__ = "10.11.1"
+ __version__ = "10.11.2rc2"
clarifai/client/dataset.py CHANGED
@@ -281,9 +281,9 @@ class Dataset(Lister, BaseClient):
  failed_input_ids = list(set(failed_input_ids) - set(duplicate_input_ids))
  duplicate_details = [[
  input_ids[id], id, "Input has a duplicate ID.",
- dataset_obj.data_generator[input_ids[id]].image_path,
- dataset_obj.data_generator[input_ids[id]].labels,
- dataset_obj.data_generator[input_ids[id]].metadata
+ getattr(dataset_obj.data_generator[input_ids[id]], 'image_path', None),
+ getattr(dataset_obj.data_generator[input_ids[id]], 'labels', None),
+ getattr(dataset_obj.data_generator[input_ids[id]], 'metadata', None)
  ] for id in duplicate_input_ids]
  duplicate_table = tabulate(
  duplicate_details,
@@ -386,7 +386,7 @@ class Dataset(Lister, BaseClient):
  tablefmt="grid")
  timestamp = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
  self.logger.warning(
- f"{timestamp}\nFailed to upload {len(failed_retrying_inputs)} inputs in current batch {batch_no}:\n{failed_table}\n\n"
+ f"{timestamp}\nFailed to upload {len(failed_retrying_inputs)} inputs in current batch {batch_no} due to {retry_response}:\n{failed_table}\n\n"
  )

  def _data_upload(self,
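The switch to `getattr(..., None)` in the duplicate-report rows matters because `data_generator` items are not guaranteed to carry every attribute (a text dataset item, for instance, has no `image_path`). A minimal sketch of the pattern, using a hypothetical stand-in class rather than the actual Clarifai feature types:

```python
from dataclasses import dataclass
from typing import List, Optional


@dataclass
class TextItem:  # hypothetical stand-in for a text dataset feature
  text: str
  labels: Optional[List[str]] = None


item = TextItem(text="hello world")

# Direct attribute access would raise AttributeError for text-only items:
#   item.image_path  ->  AttributeError
# getattr with a default degrades gracefully instead:
row = [
    getattr(item, "image_path", None),  # None: the item has no image
    getattr(item, "labels", None),
    getattr(item, "metadata", None),
]
print(row)  # [None, None, None]
```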
clarifai/client/model.py CHANGED
@@ -503,6 +503,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Predicts the model based on the given filepath.
@@ -534,7 +535,7 @@ class Model(Lister, BaseClient):
  file_bytes = f.read()

  return self.predict_by_bytes(file_bytes, input_type, compute_cluster_id, nodepool_id,
- deployment_id, inference_params, output_config)
+ deployment_id, user_id, inference_params, output_config)

  def predict_by_bytes(self,
  input_bytes: bytes,
@@ -542,6 +543,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Predicts the model based on the given bytes.
@@ -581,11 +583,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.predict(
  inputs=[input_proto],
@@ -599,6 +613,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Predicts the model based on the given URL.
@@ -639,11 +654,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.predict(
  inputs=[input_proto],
@@ -712,6 +739,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Generate the stream output on model based on the given filepath.
@@ -748,6 +776,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id=compute_cluster_id,
  nodepool_id=nodepool_id,
  deployment_id=deployment_id,
+ user_id=user_id,
  inference_params=inference_params,
  output_config=output_config)

@@ -757,6 +786,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Generate the stream output on model based on the given bytes.
@@ -798,11 +828,21 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.generate(
  inputs=[input_proto],
@@ -816,6 +856,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Generate the stream output on model based on the given URL.
@@ -857,11 +898,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.generate(
  inputs=[input_proto],
@@ -930,6 +983,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Stream the model output based on the given filepath.
@@ -964,6 +1018,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id=compute_cluster_id,
  nodepool_id=nodepool_id,
  deployment_id=deployment_id,
+ user_id=user_id,
  inference_params=inference_params,
  output_config=output_config)

@@ -973,6 +1028,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Stream the model output based on the given bytes.
@@ -1016,11 +1072,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.stream(
  inputs=input_generator(),
@@ -1034,6 +1102,7 @@ class Model(Lister, BaseClient):
  compute_cluster_id: str = None,
  nodepool_id: str = None,
  deployment_id: str = None,
+ user_id: str = None,
  inference_params: Dict = {},
  output_config: Dict = {}):
  """Stream the model output based on the given URL.
@@ -1075,11 +1144,23 @@ class Model(Lister, BaseClient):

  runner_selector = None
  if deployment_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with deployment ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Deployment.get_runner_selector(
- user_id=self.user_id, deployment_id=deployment_id)
+ user_id=user_id, deployment_id=deployment_id)
  elif compute_cluster_id and nodepool_id:
+ if not user_id and not os.environ.get('CLARIFAI_USER_ID'):
+ raise UserError(
+ "User ID is required for model prediction with compute cluster ID and nodepool ID, please provide user_id in the method call."
+ )
+ if not user_id:
+ user_id = os.environ.get('CLARIFAI_USER_ID')
  runner_selector = Nodepool.get_runner_selector(
- user_id=self.user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)
+ user_id=user_id, compute_cluster_id=compute_cluster_id, nodepool_id=nodepool_id)

  return self.stream(
  inputs=input_generator(),
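Taken together, these hunks add an optional `user_id` argument to the predict/generate/stream convenience methods and fall back to the `CLARIFAI_USER_ID` environment variable when routing through a dedicated deployment or a compute cluster/nodepool, raising `UserError` when neither is provided. A hedged usage sketch; the model URL, deployment and cluster IDs are placeholders, and the exact constructor and keyword names should be checked against your installed SDK version:

```python
import os
from clarifai.client.model import Model

os.environ.setdefault("CLARIFAI_PAT", "<your_pat>")  # placeholder credentials

model = Model(url="https://clarifai.com/<user_id>/<app_id>/models/<model_id>")

# Option 1: pass the deployment owner's user_id explicitly.
prediction = model.predict_by_url(
    url="https://samples.clarifai.com/metro-north.jpg",
    input_type="image",
    deployment_id="<deployment_id>",
    user_id="<deployment_owner_user_id>",
)

# Option 2: rely on the CLARIFAI_USER_ID environment variable instead.
# If neither user_id nor CLARIFAI_USER_ID is set, the new code raises UserError.
os.environ["CLARIFAI_USER_ID"] = "<deployment_owner_user_id>"
prediction = model.predict_by_url(
    url="https://samples.clarifai.com/metro-north.jpg",
    input_type="image",
    compute_cluster_id="<compute_cluster_id>",
    nodepool_id="<nodepool_id>",
)
```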
clarifai/datasets/upload/features.py CHANGED
@@ -7,7 +7,7 @@ from typing import List, Optional, Union
  class TextFeatures:
  """Text classification datasets preprocessing output features."""
  text: str
- labels: List[Union[str, int]] # List[str or int] to cater for multi-class tasks
+ labels: List[Union[str, int]] = None # List[str or int] to cater for multi-class tasks
  id: Optional[int] = None # text_id
  metadata: Optional[dict] = None
  label_ids: Optional[List[str]] = None
clarifai/datasets/upload/multimodal.py CHANGED
@@ -34,7 +34,8 @@ class MultiModalDataset(ClarifaiDataset):
  metadata = Struct()
  image_bytes = data_item.image_bytes
  text = data_item.text
- labels = data_item.labels if isinstance(data_item.labels, list) else [data_item.labels]
+ labels = data_item.labels if ((data_item.labels is None) or
+ isinstance(data_item.labels, list)) else [data_item.labels]
  input_id = f"{self.dataset_id}-{id}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
  metadata.update(data_item.metadata)
clarifai/datasets/upload/text.py CHANGED
@@ -34,8 +34,9 @@ class TextClassificationDataset(ClarifaiDataset):
  data_item = self.data_generator[id]
  metadata = Struct()
  text = data_item.text
- labels = data_item.labels if isinstance(data_item.labels,
- list) else [data_item.labels] # clarifai concept
+ labels = data_item.labels if ((data_item.labels is None) or
+ isinstance(data_item.labels, list)) else [data_item.labels
+ ] # clarifai concept
  label_ids = data_item.label_ids
  input_id = f"{self.dataset_id}-{get_uuid(8)}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
  if data_item.metadata is not None:
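The three upload changes above work together: `TextFeatures.labels` now defaults to `None`, and the text and multimodal dataset builders keep a `None` label as-is instead of wrapping it in a single-element list, so unlabelled inputs can be uploaded. A standalone sketch of the new branching, kept separate from the actual Clarifai classes:

```python
from typing import List, Optional, Union

Label = Union[str, int]


def normalize_labels(labels: Optional[Union[Label, List[Label]]]):
  """Mirror of the updated handling: keep None or a list unchanged,
  wrap a single scalar label in a list."""
  return labels if (labels is None or isinstance(labels, list)) else [labels]


print(normalize_labels(None))        # None -> unlabelled input is allowed
print(normalize_labels("positive"))  # ['positive']
print(normalize_labels(["a", "b"]))  # ['a', 'b']
```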
clarifai/models/model_serving/README.md ADDED
@@ -0,0 +1,158 @@
+ # Clarifai Model Serving
+
+ ## Overview
+
+ Model Serving is the part of the Clarifai user journey that offers a user-friendly interface for deploying your local model into production, featuring:
+
+ * A convenient command-line interface (CLI)
+ * Easy implementation and testing in Python
+ * No need for MLOps expertise
+
+ ## Quickstart Guide
+
+ A quick example of deploying a `text-to-text` model.
+
+ ### Initialize a Clarifai model repository
+
+ Suppose your working directory name is `your_model_dir`. Then run:
+
+ ```bash
+ $ clarifai create model --type text-to-text --working-dir your_model_dir
+ $ cd your_model_dir
+ ```
+
+ In the `your_model_dir` folder you will see the essential files for the deployment process:
+
+ ```bash
+ your_model_dir
+ ├── clarifai_config.yaml
+ ├── inference.py
+ ├── test.py
+ └── requirements.txt
+ ```
+
+ ### Implementation
+
+ Write your code in the `InferenceModel` class in `inference.py`; it is the interface between your model and the Clarifai server. There are two methods you must implement:
+
+ * `__init__`: load your model checkpoint once.
+ * `predict`: make a prediction; called every time a request comes in through the API.
+
+ For example, a complete implementation of a Hugging Face text-generation model:
+
+ ```python
+ import os
+ from typing import Dict, Union
+ from clarifai.models.model_serving.model_config import *
+
+ import torch
+ from transformers import AutoTokenizer
+ import transformers
+
+ class InferenceModel(TextToText):
+   """User model inference class."""
+
+   def __init__(self) -> None:
+     """
+     Load inference-time artifacts that are called frequently, e.g. models, tokenizers, etc.,
+     in this method so they are loaded only once for faster inference.
+     """
+     # current directory
+     self.base_path = os.path.dirname(__file__)
+     # where you saved the hf checkpoint in your working dir, i.e. `your_model_dir`
+     model_path = os.path.join(self.base_path, "checkpoint")
+     self.tokenizer = AutoTokenizer.from_pretrained(model_path)
+     self.pipeline = transformers.pipeline(
+       "text-generation",
+       model=model_path,
+       torch_dtype=torch.float16,
+       device_map="auto",
+     )
+
+   def predict(self, input_data: list,
+               inference_parameters: Dict[str, Union[str, float, int]]) -> list:
+     """Custom prediction function for a `text-to-text` (also called `text generation`) model.
+
+     Args:
+       input_data (List[str]): List of text
+       inference_parameters (Dict[str, Union[str, float, int]]): your inference parameters
+
+     Returns:
+       list of TextOutput
+     """
+     output_sequences = self.pipeline(
+       input_data,
+       eos_token_id=self.tokenizer.eos_token_id,
+       **inference_parameters)
+
+     # wrap outputs in Clarifai-defined output
+     return [TextOutput(each[0]) for each in output_sequences]
+ ```
+
93
+ Update dependencies in `requirements.txt`
94
+
95
+ ```
96
+ clarifai
97
+ torch=2.1.1
98
+ transformers==4.36.2
99
+ accelerate==0.26.1
100
+ ```
101
+
102
+ ### Test (optional)
103
+
104
+ > NOTE: Running `test` is also involved in `build` and `upload` command.
105
+
106
+ Test and play with your implementation by executing `test.py`.
107
+
108
+ Install pytest
109
+
110
+ ```bash
111
+ $ pip install pytest
112
+ ```
113
+
114
+ Execute test
115
+
116
+ ```bash
117
+ $ pytest test.py
118
+ ```
119
+
+ ### Build
+
+ Prepare for the deployment step. Run:
+
+ ```bash
+ $ clarifai build model
+ ```
+
+ You will obtain a `*.clarifai` file; it is simply a zip containing all the files necessary to get your model working on the Clarifai platform.
+
+ `NOTE`: you need to upload the built file to cloud storage to get a direct download `url` for the next step.
+
+ ### Deployment
+
+ Login to Clarifai
+
+ ```bash
+ $ clarifai login
+ Get your PAT from https://clarifai.com/settings/security and pass it here: <insert your pat here>
+ ```
+
+ Upload
+
+ ```bash
+ # upload the built file directly
+ $ clarifai upload model <your-working-dir> --user-app <your_user_id>/<your_app_id> --id <your_model_id>
+ # or use the direct download url from cloud storage
+ $ clarifai upload model --url <url> --user-app <your_user_id>/<your_app_id> --id <your_model_id>
+ ```
+
+ ## Learn More
+
+ * [Detailed instructions](./docs/concepts.md)
+ * [Examples](https://github.com/Clarifai/examples/tree/main/model_upload)
+ * [Initialize from example](./docs/cli.md)
+ * [CLI usage](./docs/cli.md)
+ * [Inference parameters](./docs/inference_parameters.md)
+ * [Model Types](./docs/model_types.md)
+ * [Dependencies](./docs/dependencies.md)
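Because the README's `InferenceModel` example depends on a local Hugging Face checkpoint, a dependency-free sketch may make the minimal contract easier to see: subclass the task wrapper and return one Clarifai output object per input. This echo model is illustrative only and assumes the `TextToText`/`TextOutput` wrappers behave as they do in the README example above:

```python
from typing import Dict, List, Union

# Wrappers exported by clarifai.models.model_serving.model_config,
# as used in the README example above.
from clarifai.models.model_serving.model_config import TextOutput, TextToText


class InferenceModel(TextToText):
  """Minimal echo model: returns each input prompt unchanged."""

  def __init__(self) -> None:
    # Nothing to load for this toy model; a real model would load its
    # checkpoint here so that loading happens only once.
    pass

  def predict(self, input_data: List[str],
              inference_parameters: Dict[str, Union[str, float, int]]) -> list:
    # One TextOutput per input, mirroring the README's return shape.
    return [TextOutput(text) for text in input_data]
```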
clarifai/models/model_serving/__init__.py ADDED
@@ -0,0 +1,14 @@
+ # Copyright 2023 Clarifai, Inc.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ from .model_config import * # noqa
+ from .repo_build import * # noqa
clarifai/models/model_serving/cli/__init__.py ADDED
@@ -0,0 +1,12 @@
+ # Copyright 2023 Clarifai, Inc.
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.