clarifai 11.7.5__py3-none-any.whl → 11.7.5rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (231)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
  3. clarifai/__pycache__/__init__.cpython-312.pyc +0 -0
  4. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  5. clarifai/__pycache__/errors.cpython-311.pyc +0 -0
  6. clarifai/__pycache__/errors.cpython-39.pyc +0 -0
  7. clarifai/__pycache__/versions.cpython-311.pyc +0 -0
  8. clarifai/__pycache__/versions.cpython-39.pyc +0 -0
  9. clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  10. clarifai/cli/__pycache__/__init__.cpython-39.pyc +0 -0
  11. clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
  12. clarifai/cli/__pycache__/base.cpython-39.pyc +0 -0
  13. clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  14. clarifai/cli/__pycache__/compute_cluster.cpython-39.pyc +0 -0
  15. clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
  16. clarifai/cli/__pycache__/deployment.cpython-39.pyc +0 -0
  17. clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
  18. clarifai/cli/__pycache__/model.cpython-39.pyc +0 -0
  19. clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
  20. clarifai/cli/__pycache__/nodepool.cpython-39.pyc +0 -0
  21. clarifai/cli/__pycache__/pipeline.cpython-311.pyc +0 -0
  22. clarifai/cli/__pycache__/pipeline_step.cpython-311.pyc +0 -0
  23. clarifai/cli/base.py +2 -2
  24. clarifai/cli/model.py +261 -139
  25. clarifai/cli/model_templates.py +243 -0
  26. clarifai/cli/pipeline.py +31 -11
  27. clarifai/cli/pipeline_step_templates.py +64 -0
  28. clarifai/cli/templates/__pycache__/__init__.cpython-311.pyc +0 -0
  29. clarifai/cli/templates/__pycache__/pipeline_templates.cpython-311.pyc +0 -0
  30. clarifai/cli/templates/pipeline_templates.py +34 -28
  31. clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
  32. clarifai/client/__pycache__/__init__.cpython-312.pyc +0 -0
  33. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  34. clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
  35. clarifai/client/__pycache__/app.cpython-312.pyc +0 -0
  36. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  37. clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
  38. clarifai/client/__pycache__/base.cpython-39.pyc +0 -0
  39. clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  40. clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
  41. clarifai/client/__pycache__/dataset.cpython-39.pyc +0 -0
  42. clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
  43. clarifai/client/__pycache__/deployment.cpython-39.pyc +0 -0
  44. clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
  45. clarifai/client/__pycache__/input.cpython-39.pyc +0 -0
  46. clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
  47. clarifai/client/__pycache__/lister.cpython-39.pyc +0 -0
  48. clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
  49. clarifai/client/__pycache__/model.cpython-39.pyc +0 -0
  50. clarifai/client/__pycache__/model_client.cpython-311.pyc +0 -0
  51. clarifai/client/__pycache__/model_client.cpython-39.pyc +0 -0
  52. clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
  53. clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
  54. clarifai/client/__pycache__/pipeline.cpython-311.pyc +0 -0
  55. clarifai/client/__pycache__/pipeline_step.cpython-311.pyc +0 -0
  56. clarifai/client/__pycache__/runner.cpython-311.pyc +0 -0
  57. clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
  58. clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
  59. clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
  60. clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
  61. clarifai/client/auth/__pycache__/__init__.cpython-39.pyc +0 -0
  62. clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
  63. clarifai/client/auth/__pycache__/helper.cpython-39.pyc +0 -0
  64. clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
  65. clarifai/client/auth/__pycache__/register.cpython-39.pyc +0 -0
  66. clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
  67. clarifai/client/auth/__pycache__/stub.cpython-39.pyc +0 -0
  68. clarifai/client/pipeline.py +20 -4
  69. clarifai/client/user.py +172 -0
  70. clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
  71. clarifai/constants/__pycache__/base.cpython-39.pyc +0 -0
  72. clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
  73. clarifai/constants/__pycache__/dataset.cpython-39.pyc +0 -0
  74. clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
  75. clarifai/constants/__pycache__/input.cpython-39.pyc +0 -0
  76. clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
  77. clarifai/constants/__pycache__/model.cpython-39.pyc +0 -0
  78. clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
  79. clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
  80. clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
  81. clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
  82. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  83. clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
  84. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  85. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
  86. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-39.pyc +0 -0
  87. clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
  88. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  89. clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
  90. clarifai/datasets/upload/__pycache__/base.cpython-39.pyc +0 -0
  91. clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
  92. clarifai/datasets/upload/__pycache__/features.cpython-39.pyc +0 -0
  93. clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
  94. clarifai/datasets/upload/__pycache__/image.cpython-39.pyc +0 -0
  95. clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
  96. clarifai/datasets/upload/__pycache__/multimodal.cpython-39.pyc +0 -0
  97. clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
  98. clarifai/datasets/upload/__pycache__/text.cpython-39.pyc +0 -0
  99. clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
  100. clarifai/datasets/upload/__pycache__/utils.cpython-39.pyc +0 -0
  101. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
  102. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
  103. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
  104. clarifai/models/model_serving/README.md +158 -0
  105. clarifai/models/model_serving/__init__.py +14 -0
  106. clarifai/models/model_serving/cli/__init__.py +12 -0
  107. clarifai/models/model_serving/cli/_utils.py +53 -0
  108. clarifai/models/model_serving/cli/base.py +14 -0
  109. clarifai/models/model_serving/cli/build.py +79 -0
  110. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  111. clarifai/models/model_serving/cli/create.py +171 -0
  112. clarifai/models/model_serving/cli/example_cli.py +34 -0
  113. clarifai/models/model_serving/cli/login.py +26 -0
  114. clarifai/models/model_serving/cli/upload.py +179 -0
  115. clarifai/models/model_serving/constants.py +21 -0
  116. clarifai/models/model_serving/docs/cli.md +161 -0
  117. clarifai/models/model_serving/docs/concepts.md +229 -0
  118. clarifai/models/model_serving/docs/dependencies.md +11 -0
  119. clarifai/models/model_serving/docs/inference_parameters.md +139 -0
  120. clarifai/models/model_serving/docs/model_types.md +19 -0
  121. clarifai/models/model_serving/model_config/__init__.py +16 -0
  122. clarifai/models/model_serving/model_config/base.py +369 -0
  123. clarifai/models/model_serving/model_config/config.py +312 -0
  124. clarifai/models/model_serving/model_config/inference_parameter.py +129 -0
  125. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -0
  126. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -0
  127. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -0
  128. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -0
  129. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -0
  130. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -0
  131. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -0
  132. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -0
  133. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -0
  134. clarifai/models/model_serving/model_config/output.py +133 -0
  135. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  136. clarifai/models/model_serving/model_config/triton/serializer.py +136 -0
  137. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  138. clarifai/models/model_serving/model_config/triton/wrappers.py +281 -0
  139. clarifai/models/model_serving/repo_build/__init__.py +14 -0
  140. clarifai/models/model_serving/repo_build/build.py +198 -0
  141. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  142. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  143. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  144. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  145. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  146. clarifai/models/model_serving/repo_build/static_files/triton/model.py +75 -0
  147. clarifai/models/model_serving/utils.py +23 -0
  148. clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
  149. clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
  150. clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
  151. clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
  152. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  153. clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
  154. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  155. clarifai/runners/models/__pycache__/dummy_openai_model.cpython-311.pyc +0 -0
  156. clarifai/runners/models/__pycache__/mcp_class.cpython-311.pyc +0 -0
  157. clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
  158. clarifai/runners/models/__pycache__/model_builder.cpython-39.pyc +0 -0
  159. clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
  160. clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
  161. clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
  162. clarifai/runners/models/__pycache__/model_servicer.cpython-311.pyc +0 -0
  163. clarifai/runners/models/__pycache__/openai_class.cpython-311.pyc +0 -0
  164. clarifai/runners/models/base_typed_model.py +238 -0
  165. clarifai/runners/models/model_builder.py +274 -10
  166. clarifai/runners/models/model_runner.py +93 -76
  167. clarifai/runners/models/model_upload.py +607 -0
  168. clarifai/runners/pipeline_steps/__pycache__/__init__.cpython-311.pyc +0 -0
  169. clarifai/runners/pipeline_steps/__pycache__/pipeline_step_builder.cpython-311.pyc +0 -0
  170. clarifai/runners/pipeline_steps/pipeline_step_builder.py +10 -1
  171. clarifai/runners/pipelines/__pycache__/__init__.cpython-311.pyc +0 -0
  172. clarifai/runners/pipelines/__pycache__/pipeline_builder.cpython-311.pyc +0 -0
  173. clarifai/runners/server.py +1 -1
  174. clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  175. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  176. clarifai/runners/utils/__pycache__/code_script.cpython-311.pyc +0 -0
  177. clarifai/runners/utils/__pycache__/code_script.cpython-39.pyc +0 -0
  178. clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
  179. clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
  180. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  181. clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
  182. clarifai/runners/utils/__pycache__/method_signatures.cpython-311.pyc +0 -0
  183. clarifai/runners/utils/__pycache__/model_utils.cpython-311.pyc +0 -0
  184. clarifai/runners/utils/__pycache__/openai_convertor.cpython-311.pyc +0 -0
  185. clarifai/runners/utils/__pycache__/pipeline_validation.cpython-311.pyc +0 -0
  186. clarifai/runners/utils/__pycache__/serializers.cpython-311.pyc +0 -0
  187. clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
  188. clarifai/runners/utils/data_handler.py +231 -0
  189. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-311.pyc +0 -0
  190. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-39.pyc +0 -0
  191. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-311.pyc +0 -0
  192. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-39.pyc +0 -0
  193. clarifai/runners/utils/data_types.py +471 -0
  194. clarifai/runners/utils/loader.py +24 -4
  195. clarifai/runners/utils/temp.py +59 -0
  196. clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
  197. clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
  198. clarifai/urls/__pycache__/helper.cpython-39.pyc +0 -0
  199. clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  200. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  201. clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
  202. clarifai/utils/__pycache__/cli.cpython-39.pyc +0 -0
  203. clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
  204. clarifai/utils/__pycache__/config.cpython-39.pyc +0 -0
  205. clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
  206. clarifai/utils/__pycache__/constants.cpython-39.pyc +0 -0
  207. clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
  208. clarifai/utils/__pycache__/logging.cpython-39.pyc +0 -0
  209. clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
  210. clarifai/utils/__pycache__/misc.cpython-39.pyc +0 -0
  211. clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
  212. clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
  213. clarifai/utils/__pycache__/protobuf.cpython-39.pyc +0 -0
  214. clarifai/utils/__pycache__/secrets.cpython-311.pyc +0 -0
  215. clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
  216. clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
  217. clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
  218. clarifai/utils/evaluation/__pycache__/testset_annotation_parser.cpython-311.pyc +0 -0
  219. clarifai/utils/misc.py +20 -0
  220. clarifai/utils/secrets.py +96 -1
  221. clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
  222. clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
  223. clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
  224. clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
  225. {clarifai-11.7.5.dist-info → clarifai-11.7.5rc1.dist-info}/METADATA +1 -1
  226. clarifai-11.7.5rc1.dist-info/RECORD +339 -0
  227. {clarifai-11.7.5.dist-info → clarifai-11.7.5rc1.dist-info}/WHEEL +1 -1
  228. clarifai-11.7.5.dist-info/RECORD +0 -129
  229. {clarifai-11.7.5.dist-info → clarifai-11.7.5rc1.dist-info}/entry_points.txt +0 -0
  230. {clarifai-11.7.5.dist-info → clarifai-11.7.5rc1.dist-info}/licenses/LICENSE +0 -0
  231. {clarifai-11.7.5.dist-info → clarifai-11.7.5rc1.dist-info}/top_level.txt +0 -0
@@ -10,7 +10,7 @@ import tarfile
10
10
  import time
11
11
  import webbrowser
12
12
  from string import Template
13
- from typing import Literal
13
+ from typing import Any, Dict, Literal, Optional
14
14
  from unittest.mock import MagicMock
15
15
 
16
16
  import yaml
@@ -102,6 +102,7 @@ class ModelBuilder:
102
102
  self.folder = self._validate_folder(folder)
103
103
  self.config = self._load_config(os.path.join(self.folder, 'config.yaml'))
104
104
  self._validate_config()
105
+ self._validate_config_secrets()
105
106
  self._validate_stream_options()
106
107
  self.model_proto = self._get_model_proto()
107
108
  self.model_id = self.model_proto.id
@@ -465,6 +466,115 @@ class ModelBuilder:
465
466
  "2) set_output_context"
466
467
  )
467
468
 
469
+ def _validate_config_secrets(self):
470
+ """
471
+ Validate the secrets section in the config file.
472
+ """
473
+ if "secrets" not in self.config:
474
+ return
475
+
476
+ secrets = self.config.get("secrets", [])
477
+ if not isinstance(secrets, list):
478
+ raise ValueError("The 'secrets' field must be an array.")
479
+
480
+ for i, secret in enumerate(secrets):
481
+ if not isinstance(secret, dict):
482
+ raise ValueError(f"Secret at index {i} must be a dictionary.")
483
+
484
+ # Validate required fields
485
+ if "id" not in secret or not secret["id"]:
486
+ raise ValueError(f"Secret at index {i} must have a non-empty 'id' field.")
487
+
488
+ if "type" not in secret or not secret["type"]:
489
+ secret["type"] = "env"
490
+
491
+ if "env_var" not in secret or not secret["env_var"]:
492
+ raise ValueError(f"Secret at index {i} must have a non-empty 'env_var' field.")
493
+ # Validate secret type
494
+ if secret["type"] != "env":
495
+ raise ValueError(
496
+ f"Secret at index {i} has invalid type '{secret['type']}'. Must be 'env'."
497
+ )
498
+
499
+ logger.info(f"Validated {len(secrets)} secrets in config file.")
500
+
501
+ def _process_secrets(self):
502
+ """
503
+ Process secrets from config file and create/validate them using the User client.
504
+ Returns the processed secrets array for inclusion in ModelVersion.OutputInfo.Params.
505
+ """
506
+ if "secrets" not in self.config:
507
+ return []
508
+
509
+ secrets = self.config.get("secrets", [])
510
+ if not secrets:
511
+ return []
512
+
513
+ # Get user client for secret operations
514
+ user = User(
515
+ user_id=self.config.get('model').get('user_id'),
516
+ pat=self.client.pat,
517
+ token=self.client.token,
518
+ base_url=self.client.base,
519
+ )
520
+
521
+ processed_secrets = []
522
+ secrets_to_create = []
523
+
524
+ for secret in secrets:
525
+ secret_id = secret["id"]
526
+ secret_type = secret.get("type", "env")
527
+ env_var = secret["env_var"]
528
+ secret_value = secret.get("value") # Optional for existing secrets
529
+
530
+ # Check if secret already exists
531
+ try:
532
+ existing_secret = user.get_secret(secret_id)
533
+ logger.info(f"Secret '{secret_id}' already exists, using existing secret.")
534
+
535
+ # Add to processed secrets without the value
536
+ processed_secret = {
537
+ "id": secret_id,
538
+ "type": secret_type,
539
+ "env_var": env_var,
540
+ }
541
+ processed_secrets.append(processed_secret)
542
+
543
+ except Exception:
544
+ # Secret doesn't exist, need to create it
545
+ if secret_value:
546
+ logger.info(f"Secret '{secret_id}' does not exist, will create it.")
547
+ secrets_to_create.append(
548
+ {
549
+ "id": secret_id,
550
+ "value": secret_value,
551
+ "description": secret.get("description", f"Secret for {env_var}"),
552
+ }
553
+ )
554
+
555
+ # Add to processed secrets
556
+ processed_secret = {
557
+ "id": secret_id,
558
+ "type": secret_type,
559
+ "env_var": env_var,
560
+ }
561
+ processed_secrets.append(processed_secret)
562
+ else:
563
+ raise ValueError(
564
+ f"Secret '{secret_id}' does not exist and no value provided for creation."
565
+ )
566
+
567
+ # Create new secrets if any
568
+ if secrets_to_create:
569
+ try:
570
+ created_secrets = user.create_secrets(secrets_to_create)
571
+ logger.info(f"Successfully created {len(created_secrets)} new secrets.")
572
+ except Exception as e:
573
+ logger.error(f"Failed to create secrets: {e}")
574
+ raise
575
+
576
+ return processed_secrets
577
+
468
578
  def _is_clarifai_internal(self):
469
579
  """
470
580
  Check if the current user is a Clarifai internal user based on email domain.
@@ -1107,7 +1217,7 @@ class ModelBuilder:
1107
1217
  logger.error(f"Failed to download checkpoints for model {repo_id}")
1108
1218
  sys.exit(1)
1109
1219
  else:
1110
- logger.info(f"Downloaded checkpoints for model {repo_id}")
1220
+ logger.info(f"Downloaded checkpoints for model {repo_id} successfully to {path}")
1111
1221
  return path
1112
1222
 
1113
1223
  def _concepts_protos_from_concepts(self, concepts):
@@ -1140,7 +1250,109 @@ class ModelBuilder:
1140
1250
  concepts = config.get('concepts')
1141
1251
  logger.info(f"Updated config.yaml with {len(concepts)} concepts.")
1142
1252
 
1143
- def get_model_version_proto(self):
1253
+ def _get_git_info(self) -> Optional[Dict[str, Any]]:
1254
+ """
1255
+ Get git repository information for the model path.
1256
+
1257
+ Returns:
1258
+ Dict with git info (url, commit, branch) or None if not a git repository
1259
+ """
1260
+ try:
1261
+ # Check if the folder is within a git repository
1262
+ result = subprocess.run(
1263
+ ['git', 'rev-parse', '--git-dir'],
1264
+ cwd=self.folder,
1265
+ capture_output=True,
1266
+ text=True,
1267
+ check=True,
1268
+ )
1269
+
1270
+ # Get git remote URL
1271
+ remote_result = subprocess.run(
1272
+ ['git', 'config', '--get', 'remote.origin.url'],
1273
+ cwd=self.folder,
1274
+ capture_output=True,
1275
+ text=True,
1276
+ check=False,
1277
+ )
1278
+
1279
+ # Get current commit hash
1280
+ commit_result = subprocess.run(
1281
+ ['git', 'rev-parse', 'HEAD'],
1282
+ cwd=self.folder,
1283
+ capture_output=True,
1284
+ text=True,
1285
+ check=True,
1286
+ )
1287
+
1288
+ # Get current branch
1289
+ branch_result = subprocess.run(
1290
+ ['git', 'branch', '--show-current'],
1291
+ cwd=self.folder,
1292
+ capture_output=True,
1293
+ text=True,
1294
+ check=False,
1295
+ )
1296
+
1297
+ git_info = {
1298
+ 'commit': commit_result.stdout.strip(),
1299
+ 'branch': branch_result.stdout.strip()
1300
+ if branch_result.returncode == 0
1301
+ else 'HEAD',
1302
+ }
1303
+
1304
+ if remote_result.returncode == 0:
1305
+ git_info['url'] = remote_result.stdout.strip()
1306
+
1307
+ return git_info
1308
+
1309
+ except (subprocess.CalledProcessError, FileNotFoundError):
1310
+ # Not a git repository or git not available
1311
+ return None
1312
+
1313
def _check_git_status_and_prompt(self) -> bool:
    """
    Check for uncommitted changes in the git repository within the model
    path and, if any are found, prompt the user whether to proceed.

    Returns:
        True if the upload should continue, False if it should abort.
    """
    try:
        # Restrict the status check to the model path itself.
        porcelain = subprocess.run(
            ['git', 'status', '--porcelain', '.'],
            cwd=self.folder,
            capture_output=True,
            text=True,
            check=True,
        )
    except (subprocess.CalledProcessError, FileNotFoundError):
        # We already know this is a git repo; if the status query itself
        # fails, do not block the upload.
        logger.warning("Could not check git status, continuing with upload.")
        return True

    if not porcelain.stdout.strip():
        logger.info("Model path has no uncommitted changes.")
        return True

    logger.warning("Uncommitted changes detected in model path:")
    logger.warning(porcelain.stdout)
    answer = input("\nDo you want to continue upload with uncommitted changes? (y/N): ")
    return answer.lower() in ['y', 'yes']
1346
+
1347
+ def get_model_version_proto(self, git_info: Optional[Dict[str, Any]] = None):
1348
+ """
1349
+ Create a ModelVersion protobuf message for the model.
1350
+ Args:
1351
+ git_info (Optional[Dict[str, Any]]): Git repository information to include in metadata.
1352
+ Returns:
1353
+ resources_pb2.ModelVersion: The ModelVersion protobuf message.
1354
+ """
1355
+
1144
1356
  signatures = self.get_method_signatures()
1145
1357
  model_version_proto = resources_pb2.ModelVersion(
1146
1358
  pretrained_model_config=resources_pb2.PretrainedModelConfig(),
@@ -1148,6 +1360,37 @@ class ModelBuilder:
1148
1360
  method_signatures=signatures,
1149
1361
  )
1150
1362
 
1363
+ # Add git information to metadata if available
1364
+ if git_info:
1365
+ from google.protobuf.struct_pb2 import Struct
1366
+
1367
+ metadata_struct = Struct()
1368
+ metadata_struct.update({'git_registry': git_info})
1369
+ model_version_proto.metadata.CopyFrom(metadata_struct)
1370
+
1371
+ # Process and add secrets to output_info.params
1372
+ try:
1373
+ processed_secrets = self._process_secrets()
1374
+ if processed_secrets:
1375
+ # Initialize output_info.params if not already present
1376
+ if not model_version_proto.HasField("output_info"):
1377
+ model_version_proto.output_info.CopyFrom(resources_pb2.OutputInfo())
1378
+
1379
+ # Initialize params if not already present
1380
+ if not model_version_proto.output_info.HasField("params"):
1381
+ from google.protobuf.struct_pb2 import Struct
1382
+
1383
+ model_version_proto.output_info.params.CopyFrom(Struct())
1384
+
1385
+ # Add secrets to params
1386
+ model_version_proto.output_info.params.update({"secrets": processed_secrets})
1387
+ logger.info(
1388
+ f"Added {len(processed_secrets)} secrets to model version output_info.params"
1389
+ )
1390
+ except Exception as e:
1391
+ logger.error(f"Failed to process secrets: {e}")
1392
+ raise
1393
+
1151
1394
  model_type_id = self.config.get('model').get('model_type_id')
1152
1395
  if model_type_id in CONCEPTS_REQUIRED_MODEL_TYPE:
1153
1396
  if 'concepts' in self.config:
@@ -1175,7 +1418,7 @@ class ModelBuilder:
1175
1418
  )
1176
1419
  return model_version_proto
1177
1420
 
1178
- def upload_model_version(self):
1421
+ def upload_model_version(self, git_info=None):
1179
1422
  file_path = f"{self.folder}.tar.gz"
1180
1423
  logger.debug(f"Will tar it into file: {file_path}")
1181
1424
 
@@ -1208,7 +1451,7 @@ class ModelBuilder:
1208
1451
  )
1209
1452
  return
1210
1453
 
1211
- model_version_proto = self.get_model_version_proto()
1454
+ model_version_proto = self.get_model_version_proto(git_info)
1212
1455
 
1213
1456
  def filter_func(tarinfo):
1214
1457
  name = tarinfo.name
@@ -1228,7 +1471,7 @@ class ModelBuilder:
1228
1471
  if when != "upload" and self.config.get("checkpoints"):
1229
1472
  # Get the checkpoint size to add to the storage request.
1230
1473
  # First check for the env variable, then try querying huggingface. If all else fails, use the default.
1231
- checkpoint_size = os.environ.get('CHECKPOINT_SIZE_BYTES', 0)
1474
+ checkpoint_size = int(os.environ.get('CHECKPOINT_SIZE_BYTES', 0))
1232
1475
  if not checkpoint_size:
1233
1476
  _, repo_id, _, _, _, _ = self._validate_config_checkpoints()
1234
1477
  checkpoint_size = HuggingFaceLoader.get_huggingface_checkpoint_total_size(repo_id)
@@ -1332,13 +1575,13 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
1332
1575
  )
1333
1576
  return result
1334
1577
 
1335
- def get_model_build_logs(self):
1578
+ def get_model_build_logs(self, current_page=1):
1336
1579
  logs_request = service_pb2.ListLogEntriesRequest(
1337
1580
  log_type="builder",
1338
1581
  user_app_id=self.client.user_app_id,
1339
1582
  model_id=self.model_proto.id,
1340
1583
  model_version_id=self.model_version_id,
1341
- page=1,
1584
+ page=current_page,
1342
1585
  per_page=50,
1343
1586
  )
1344
1587
  response = self.client.STUB.ListLogEntries(logs_request)
@@ -1347,6 +1590,7 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
1347
1590
  def monitor_model_build(self):
1348
1591
  st = time.time()
1349
1592
  seen_logs = set() # To avoid duplicate log messages
1593
+ current_page = 1 # Track current page for log pagination
1350
1594
  while True:
1351
1595
  resp = self.client.STUB.GetModelVersion(
1352
1596
  service_pb2.GetModelVersionRequest(
@@ -1357,8 +1601,10 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
1357
1601
  )
1358
1602
 
1359
1603
  status_code = resp.model_version.status.code
1360
- logs = self.get_model_build_logs()
1604
+ logs = self.get_model_build_logs(current_page)
1605
+ entries_count = 0
1361
1606
  for log_entry in logs.log_entries:
1607
+ entries_count += 1
1362
1608
  if log_entry.url not in seen_logs:
1363
1609
  seen_logs.add(log_entry.url)
1364
1610
  log_entry_msg = re.sub(
@@ -1367,6 +1613,12 @@ XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX
1367
1613
  log_entry.message.strip(),
1368
1614
  )
1369
1615
  logger.info(log_entry_msg)
1616
+
1617
+ # If we got a full page (50 entries), there might be more logs on the next page
1618
+ # If we got fewer than 50 entries, we've reached the end and should stay on current page
1619
+ if entries_count == 50:
1620
+ current_page += 1
1621
+ # else: stay on current_page
1370
1622
  if status_code == status_code_pb2.MODEL_BUILDING:
1371
1623
  print(
1372
1624
  f"Model is building... (elapsed {time.time() - st:.1f}s)", end='\r', flush=True
@@ -1414,8 +1666,20 @@ def upload_model(folder, stage, skip_dockerfile, pat=None, base_url=None):
1414
1666
  f"New model will be created at {builder.model_ui_url} with it's first version."
1415
1667
  )
1416
1668
 
1669
+ # Check for git repository information
1670
+ git_info = builder._get_git_info()
1671
+ if git_info:
1672
+ logger.info(f"Detected git repository: {git_info.get('url', 'local repository')}")
1673
+ logger.info(f"Current commit: {git_info['commit']}")
1674
+ logger.info(f"Current branch: {git_info['branch']}")
1675
+
1676
+ # Check for uncommitted changes and prompt user
1677
+ if not builder._check_git_status_and_prompt():
1678
+ logger.info("Upload cancelled by user due to uncommitted changes.")
1679
+ return
1417
1680
  input("Press Enter to continue...")
1418
- model_version = builder.upload_model_version()
1681
+
1682
+ model_version = builder.upload_model_version(git_info)
1419
1683
 
1420
1684
  # Ask user if they want to deploy the model
1421
1685
  if model_version is not None: # if it comes back None then it failed.
@@ -9,7 +9,7 @@ from clarifai_protocol.utils.health import HealthProbeRequestHandler
9
9
  from clarifai.client.auth.helper import ClarifaiAuthHelper
10
10
  from clarifai.utils.constants import STATUS_FAIL, STATUS_MIXED, STATUS_OK, STATUS_UNKNOWN
11
11
  from clarifai.utils.logging import get_req_id_from_context, logger
12
- from clarifai.utils.secrets import inject_secrets
12
+ from clarifai.utils.secrets import inject_secrets, req_secrets_context
13
13
 
14
14
  from ..utils.url_fetcher import ensure_urls_downloaded
15
15
  from .model_class import ModelClass
@@ -126,7 +126,10 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
126
126
  if method_name == '_GET_SIGNATURES':
127
127
  logging = False
128
128
 
129
- resp = self.model.predict_wrapper(request)
129
+ # Use req_secrets_context to temporarily set request-type secrets as environment variables
130
+ with req_secrets_context(request):
131
+ resp = self.model.predict_wrapper(request)
132
+
130
133
  # if we have any non-successful code already it's an error we can return.
131
134
  if (
132
135
  resp.status.code != status_code_pb2.SUCCESS
@@ -185,45 +188,49 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
185
188
  status_str = STATUS_UNKNOWN
186
189
  endpoint = "POST /v2/.../outputs/generate"
187
190
 
188
- for resp in self.model.generate_wrapper(request):
189
- # if we have any non-successful code already it's an error we can return.
190
- if (
191
- resp.status.code != status_code_pb2.SUCCESS
192
- and resp.status.code != status_code_pb2.ZERO
193
- ):
194
- status_str = f"{resp.status.code} ERROR"
195
- duration_ms = (time.time() - start_time) * 1000
196
- logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
197
- yield service_pb2.RunnerItemOutput(multi_output_response=resp)
198
- continue
199
- successes = []
200
- for output in resp.outputs:
201
- if not output.HasField('status') or not output.status.code:
202
- raise Exception(
203
- "Output must have a status code, please check the model implementation."
191
+ # Use req_secrets_context to temporarily set request-type secrets as environment variables
192
+ with req_secrets_context(request):
193
+ for resp in self.model.generate_wrapper(request):
194
+ # if we have any non-successful code already it's an error we can return.
195
+ if (
196
+ resp.status.code != status_code_pb2.SUCCESS
197
+ and resp.status.code != status_code_pb2.ZERO
198
+ ):
199
+ status_str = f"{resp.status.code} ERROR"
200
+ duration_ms = (time.time() - start_time) * 1000
201
+ logger.info(
202
+ f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}"
204
203
  )
205
- successes.append(output.status.code == status_code_pb2.SUCCESS)
206
- if all(successes):
207
- status = status_pb2.Status(
208
- code=status_code_pb2.SUCCESS,
209
- description="Success",
210
- )
211
- status_str = STATUS_OK
212
- elif any(successes):
213
- status = status_pb2.Status(
214
- code=status_code_pb2.MIXED_STATUS,
215
- description="Mixed Status",
216
- )
217
- status_str = STATUS_MIXED
218
- else:
219
- status = status_pb2.Status(
220
- code=status_code_pb2.FAILURE,
221
- description="Failed",
222
- )
223
- status_str = STATUS_FAIL
224
- resp.status.CopyFrom(status)
204
+ yield service_pb2.RunnerItemOutput(multi_output_response=resp)
205
+ continue
206
+ successes = []
207
+ for output in resp.outputs:
208
+ if not output.HasField('status') or not output.status.code:
209
+ raise Exception(
210
+ "Output must have a status code, please check the model implementation."
211
+ )
212
+ successes.append(output.status.code == status_code_pb2.SUCCESS)
213
+ if all(successes):
214
+ status = status_pb2.Status(
215
+ code=status_code_pb2.SUCCESS,
216
+ description="Success",
217
+ )
218
+ status_str = STATUS_OK
219
+ elif any(successes):
220
+ status = status_pb2.Status(
221
+ code=status_code_pb2.MIXED_STATUS,
222
+ description="Mixed Status",
223
+ )
224
+ status_str = STATUS_MIXED
225
+ else:
226
+ status = status_pb2.Status(
227
+ code=status_code_pb2.FAILURE,
228
+ description="Failed",
229
+ )
230
+ status_str = STATUS_FAIL
231
+ resp.status.CopyFrom(status)
225
232
 
226
- yield service_pb2.RunnerItemOutput(multi_output_response=resp)
233
+ yield service_pb2.RunnerItemOutput(multi_output_response=resp)
227
234
 
228
235
  duration_ms = (time.time() - start_time) * 1000
229
236
  logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
@@ -237,45 +244,55 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
237
244
  status_str = STATUS_UNKNOWN
238
245
  endpoint = "POST /v2/.../outputs/stream "
239
246
 
240
- for resp in self.model.stream_wrapper(pmo_iterator(runner_item_iterator)):
241
- # if we have any non-successful code already it's an error we can return.
242
- if (
243
- resp.status.code != status_code_pb2.SUCCESS
244
- and resp.status.code != status_code_pb2.ZERO
245
- ):
246
- status_str = f"{resp.status.code} ERROR"
247
- duration_ms = (time.time() - start_time) * 1000
248
- logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
249
- yield service_pb2.RunnerItemOutput(multi_output_response=resp)
250
- continue
251
- successes = []
252
- for output in resp.outputs:
253
- if not output.HasField('status') or not output.status.code:
254
- raise Exception(
255
- "Output must have a status code, please check the model implementation."
247
+ # Get the first request to establish secrets context
248
+ first_request = None
249
+ runner_items = list(runner_item_iterator) # Convert to list to avoid consuming iterator
250
+ if runner_items:
251
+ first_request = runner_items[0].post_model_outputs_request
252
+
253
+ # Use req_secrets_context based on the first request (secrets should be consistent across stream)
254
+ with req_secrets_context(first_request):
255
+ for resp in self.model.stream_wrapper(pmo_iterator(iter(runner_items))):
256
+ # if we have any non-successful code already it's an error we can return.
257
+ if (
258
+ resp.status.code != status_code_pb2.SUCCESS
259
+ and resp.status.code != status_code_pb2.ZERO
260
+ ):
261
+ status_str = f"{resp.status.code} ERROR"
262
+ duration_ms = (time.time() - start_time) * 1000
263
+ logger.info(
264
+ f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}"
256
265
  )
257
- successes.append(output.status.code == status_code_pb2.SUCCESS)
258
- if all(successes):
259
- status = status_pb2.Status(
260
- code=status_code_pb2.SUCCESS,
261
- description="Success",
262
- )
263
- status_str = STATUS_OK
264
- elif any(successes):
265
- status = status_pb2.Status(
266
- code=status_code_pb2.MIXED_STATUS,
267
- description="Mixed Status",
268
- )
269
- status_str = STATUS_MIXED
270
- else:
271
- status = status_pb2.Status(
272
- code=status_code_pb2.FAILURE,
273
- description="Failed",
274
- )
275
- status_str = STATUS_FAIL
276
- resp.status.CopyFrom(status)
266
+ yield service_pb2.RunnerItemOutput(multi_output_response=resp)
267
+ continue
268
+ successes = []
269
+ for output in resp.outputs:
270
+ if not output.HasField('status') or not output.status.code:
271
+ raise Exception(
272
+ "Output must have a status code, please check the model implementation."
273
+ )
274
+ successes.append(output.status.code == status_code_pb2.SUCCESS)
275
+ if all(successes):
276
+ status = status_pb2.Status(
277
+ code=status_code_pb2.SUCCESS,
278
+ description="Success",
279
+ )
280
+ status_str = STATUS_OK
281
+ elif any(successes):
282
+ status = status_pb2.Status(
283
+ code=status_code_pb2.MIXED_STATUS,
284
+ description="Mixed Status",
285
+ )
286
+ status_str = STATUS_MIXED
287
+ else:
288
+ status = status_pb2.Status(
289
+ code=status_code_pb2.FAILURE,
290
+ description="Failed",
291
+ )
292
+ status_str = STATUS_FAIL
293
+ resp.status.CopyFrom(status)
277
294
 
278
- yield service_pb2.RunnerItemOutput(multi_output_response=resp)
295
+ yield service_pb2.RunnerItemOutput(multi_output_response=resp)
279
296
 
280
297
  duration_ms = (time.time() - start_time) * 1000
281
298
  logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")