clarifai 11.7.4__py3-none-any.whl → 11.7.5rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (231)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-311.pyc +0 -0
  3. clarifai/__pycache__/__init__.cpython-312.pyc +0 -0
  4. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  5. clarifai/__pycache__/errors.cpython-311.pyc +0 -0
  6. clarifai/__pycache__/errors.cpython-39.pyc +0 -0
  7. clarifai/__pycache__/versions.cpython-311.pyc +0 -0
  8. clarifai/__pycache__/versions.cpython-39.pyc +0 -0
  9. clarifai/cli/__pycache__/__init__.cpython-311.pyc +0 -0
  10. clarifai/cli/__pycache__/__init__.cpython-39.pyc +0 -0
  11. clarifai/cli/__pycache__/base.cpython-311.pyc +0 -0
  12. clarifai/cli/__pycache__/base.cpython-39.pyc +0 -0
  13. clarifai/cli/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  14. clarifai/cli/__pycache__/compute_cluster.cpython-39.pyc +0 -0
  15. clarifai/cli/__pycache__/deployment.cpython-311.pyc +0 -0
  16. clarifai/cli/__pycache__/deployment.cpython-39.pyc +0 -0
  17. clarifai/cli/__pycache__/model.cpython-311.pyc +0 -0
  18. clarifai/cli/__pycache__/model.cpython-39.pyc +0 -0
  19. clarifai/cli/__pycache__/nodepool.cpython-311.pyc +0 -0
  20. clarifai/cli/__pycache__/nodepool.cpython-39.pyc +0 -0
  21. clarifai/cli/__pycache__/pipeline.cpython-311.pyc +0 -0
  22. clarifai/cli/__pycache__/pipeline_step.cpython-311.pyc +0 -0
  23. clarifai/cli/base.py +2 -2
  24. clarifai/cli/model.py +264 -144
  25. clarifai/cli/model_templates.py +243 -0
  26. clarifai/cli/pipeline.py +31 -11
  27. clarifai/cli/pipeline_step_templates.py +64 -0
  28. clarifai/cli/templates/__pycache__/__init__.cpython-311.pyc +0 -0
  29. clarifai/cli/templates/__pycache__/pipeline_templates.cpython-311.pyc +0 -0
  30. clarifai/cli/templates/pipeline_templates.py +34 -28
  31. clarifai/client/__pycache__/__init__.cpython-311.pyc +0 -0
  32. clarifai/client/__pycache__/__init__.cpython-312.pyc +0 -0
  33. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  34. clarifai/client/__pycache__/app.cpython-311.pyc +0 -0
  35. clarifai/client/__pycache__/app.cpython-312.pyc +0 -0
  36. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  37. clarifai/client/__pycache__/base.cpython-311.pyc +0 -0
  38. clarifai/client/__pycache__/base.cpython-39.pyc +0 -0
  39. clarifai/client/__pycache__/compute_cluster.cpython-311.pyc +0 -0
  40. clarifai/client/__pycache__/dataset.cpython-311.pyc +0 -0
  41. clarifai/client/__pycache__/dataset.cpython-39.pyc +0 -0
  42. clarifai/client/__pycache__/deployment.cpython-311.pyc +0 -0
  43. clarifai/client/__pycache__/deployment.cpython-39.pyc +0 -0
  44. clarifai/client/__pycache__/input.cpython-311.pyc +0 -0
  45. clarifai/client/__pycache__/input.cpython-39.pyc +0 -0
  46. clarifai/client/__pycache__/lister.cpython-311.pyc +0 -0
  47. clarifai/client/__pycache__/lister.cpython-39.pyc +0 -0
  48. clarifai/client/__pycache__/model.cpython-311.pyc +0 -0
  49. clarifai/client/__pycache__/model.cpython-39.pyc +0 -0
  50. clarifai/client/__pycache__/model_client.cpython-311.pyc +0 -0
  51. clarifai/client/__pycache__/model_client.cpython-39.pyc +0 -0
  52. clarifai/client/__pycache__/module.cpython-311.pyc +0 -0
  53. clarifai/client/__pycache__/nodepool.cpython-311.pyc +0 -0
  54. clarifai/client/__pycache__/pipeline.cpython-311.pyc +0 -0
  55. clarifai/client/__pycache__/pipeline_step.cpython-311.pyc +0 -0
  56. clarifai/client/__pycache__/runner.cpython-311.pyc +0 -0
  57. clarifai/client/__pycache__/search.cpython-311.pyc +0 -0
  58. clarifai/client/__pycache__/user.cpython-311.pyc +0 -0
  59. clarifai/client/__pycache__/workflow.cpython-311.pyc +0 -0
  60. clarifai/client/auth/__pycache__/__init__.cpython-311.pyc +0 -0
  61. clarifai/client/auth/__pycache__/__init__.cpython-39.pyc +0 -0
  62. clarifai/client/auth/__pycache__/helper.cpython-311.pyc +0 -0
  63. clarifai/client/auth/__pycache__/helper.cpython-39.pyc +0 -0
  64. clarifai/client/auth/__pycache__/register.cpython-311.pyc +0 -0
  65. clarifai/client/auth/__pycache__/register.cpython-39.pyc +0 -0
  66. clarifai/client/auth/__pycache__/stub.cpython-311.pyc +0 -0
  67. clarifai/client/auth/__pycache__/stub.cpython-39.pyc +0 -0
  68. clarifai/client/pipeline.py +20 -4
  69. clarifai/client/user.py +172 -0
  70. clarifai/constants/__pycache__/base.cpython-311.pyc +0 -0
  71. clarifai/constants/__pycache__/base.cpython-39.pyc +0 -0
  72. clarifai/constants/__pycache__/dataset.cpython-311.pyc +0 -0
  73. clarifai/constants/__pycache__/dataset.cpython-39.pyc +0 -0
  74. clarifai/constants/__pycache__/input.cpython-311.pyc +0 -0
  75. clarifai/constants/__pycache__/input.cpython-39.pyc +0 -0
  76. clarifai/constants/__pycache__/model.cpython-311.pyc +0 -0
  77. clarifai/constants/__pycache__/model.cpython-39.pyc +0 -0
  78. clarifai/constants/__pycache__/rag.cpython-311.pyc +0 -0
  79. clarifai/constants/__pycache__/search.cpython-311.pyc +0 -0
  80. clarifai/constants/__pycache__/workflow.cpython-311.pyc +0 -0
  81. clarifai/datasets/__pycache__/__init__.cpython-311.pyc +0 -0
  82. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  83. clarifai/datasets/export/__pycache__/__init__.cpython-311.pyc +0 -0
  84. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  85. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-311.pyc +0 -0
  86. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-39.pyc +0 -0
  87. clarifai/datasets/upload/__pycache__/__init__.cpython-311.pyc +0 -0
  88. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  89. clarifai/datasets/upload/__pycache__/base.cpython-311.pyc +0 -0
  90. clarifai/datasets/upload/__pycache__/base.cpython-39.pyc +0 -0
  91. clarifai/datasets/upload/__pycache__/features.cpython-311.pyc +0 -0
  92. clarifai/datasets/upload/__pycache__/features.cpython-39.pyc +0 -0
  93. clarifai/datasets/upload/__pycache__/image.cpython-311.pyc +0 -0
  94. clarifai/datasets/upload/__pycache__/image.cpython-39.pyc +0 -0
  95. clarifai/datasets/upload/__pycache__/multimodal.cpython-311.pyc +0 -0
  96. clarifai/datasets/upload/__pycache__/multimodal.cpython-39.pyc +0 -0
  97. clarifai/datasets/upload/__pycache__/text.cpython-311.pyc +0 -0
  98. clarifai/datasets/upload/__pycache__/text.cpython-39.pyc +0 -0
  99. clarifai/datasets/upload/__pycache__/utils.cpython-311.pyc +0 -0
  100. clarifai/datasets/upload/__pycache__/utils.cpython-39.pyc +0 -0
  101. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-311.pyc +0 -0
  102. clarifai/datasets/upload/loaders/__pycache__/coco_detection.cpython-311.pyc +0 -0
  103. clarifai/datasets/upload/loaders/__pycache__/imagenet_classification.cpython-311.pyc +0 -0
  104. clarifai/models/model_serving/README.md +158 -0
  105. clarifai/models/model_serving/__init__.py +14 -0
  106. clarifai/models/model_serving/cli/__init__.py +12 -0
  107. clarifai/models/model_serving/cli/_utils.py +53 -0
  108. clarifai/models/model_serving/cli/base.py +14 -0
  109. clarifai/models/model_serving/cli/build.py +79 -0
  110. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  111. clarifai/models/model_serving/cli/create.py +171 -0
  112. clarifai/models/model_serving/cli/example_cli.py +34 -0
  113. clarifai/models/model_serving/cli/login.py +26 -0
  114. clarifai/models/model_serving/cli/upload.py +179 -0
  115. clarifai/models/model_serving/constants.py +21 -0
  116. clarifai/models/model_serving/docs/cli.md +161 -0
  117. clarifai/models/model_serving/docs/concepts.md +229 -0
  118. clarifai/models/model_serving/docs/dependencies.md +11 -0
  119. clarifai/models/model_serving/docs/inference_parameters.md +139 -0
  120. clarifai/models/model_serving/docs/model_types.md +19 -0
  121. clarifai/models/model_serving/model_config/__init__.py +16 -0
  122. clarifai/models/model_serving/model_config/base.py +369 -0
  123. clarifai/models/model_serving/model_config/config.py +312 -0
  124. clarifai/models/model_serving/model_config/inference_parameter.py +129 -0
  125. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -0
  126. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -0
  127. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -0
  128. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -0
  129. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -0
  130. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -0
  131. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -0
  132. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -0
  133. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -0
  134. clarifai/models/model_serving/model_config/output.py +133 -0
  135. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  136. clarifai/models/model_serving/model_config/triton/serializer.py +136 -0
  137. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  138. clarifai/models/model_serving/model_config/triton/wrappers.py +281 -0
  139. clarifai/models/model_serving/repo_build/__init__.py +14 -0
  140. clarifai/models/model_serving/repo_build/build.py +198 -0
  141. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  142. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  143. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  144. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  145. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  146. clarifai/models/model_serving/repo_build/static_files/triton/model.py +75 -0
  147. clarifai/models/model_serving/utils.py +23 -0
  148. clarifai/rag/__pycache__/__init__.cpython-311.pyc +0 -0
  149. clarifai/rag/__pycache__/rag.cpython-311.pyc +0 -0
  150. clarifai/rag/__pycache__/utils.cpython-311.pyc +0 -0
  151. clarifai/runners/__pycache__/__init__.cpython-311.pyc +0 -0
  152. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  153. clarifai/runners/models/__pycache__/__init__.cpython-311.pyc +0 -0
  154. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  155. clarifai/runners/models/__pycache__/dummy_openai_model.cpython-311.pyc +0 -0
  156. clarifai/runners/models/__pycache__/mcp_class.cpython-311.pyc +0 -0
  157. clarifai/runners/models/__pycache__/model_builder.cpython-311.pyc +0 -0
  158. clarifai/runners/models/__pycache__/model_builder.cpython-39.pyc +0 -0
  159. clarifai/runners/models/__pycache__/model_class.cpython-311.pyc +0 -0
  160. clarifai/runners/models/__pycache__/model_run_locally.cpython-311.pyc +0 -0
  161. clarifai/runners/models/__pycache__/model_runner.cpython-311.pyc +0 -0
  162. clarifai/runners/models/__pycache__/model_servicer.cpython-311.pyc +0 -0
  163. clarifai/runners/models/__pycache__/openai_class.cpython-311.pyc +0 -0
  164. clarifai/runners/models/base_typed_model.py +238 -0
  165. clarifai/runners/models/model_builder.py +274 -10
  166. clarifai/runners/models/model_runner.py +93 -76
  167. clarifai/runners/models/model_upload.py +607 -0
  168. clarifai/runners/pipeline_steps/__pycache__/__init__.cpython-311.pyc +0 -0
  169. clarifai/runners/pipeline_steps/__pycache__/pipeline_step_builder.cpython-311.pyc +0 -0
  170. clarifai/runners/pipeline_steps/pipeline_step_builder.py +10 -1
  171. clarifai/runners/pipelines/__pycache__/__init__.cpython-311.pyc +0 -0
  172. clarifai/runners/pipelines/__pycache__/pipeline_builder.cpython-311.pyc +0 -0
  173. clarifai/runners/server.py +1 -1
  174. clarifai/runners/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  175. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  176. clarifai/runners/utils/__pycache__/code_script.cpython-311.pyc +0 -0
  177. clarifai/runners/utils/__pycache__/code_script.cpython-39.pyc +0 -0
  178. clarifai/runners/utils/__pycache__/const.cpython-311.pyc +0 -0
  179. clarifai/runners/utils/__pycache__/data_utils.cpython-311.pyc +0 -0
  180. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  181. clarifai/runners/utils/__pycache__/loader.cpython-311.pyc +0 -0
  182. clarifai/runners/utils/__pycache__/method_signatures.cpython-311.pyc +0 -0
  183. clarifai/runners/utils/__pycache__/model_utils.cpython-311.pyc +0 -0
  184. clarifai/runners/utils/__pycache__/openai_convertor.cpython-311.pyc +0 -0
  185. clarifai/runners/utils/__pycache__/pipeline_validation.cpython-311.pyc +0 -0
  186. clarifai/runners/utils/__pycache__/serializers.cpython-311.pyc +0 -0
  187. clarifai/runners/utils/__pycache__/url_fetcher.cpython-311.pyc +0 -0
  188. clarifai/runners/utils/data_handler.py +231 -0
  189. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-311.pyc +0 -0
  190. clarifai/runners/utils/data_types/__pycache__/__init__.cpython-39.pyc +0 -0
  191. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-311.pyc +0 -0
  192. clarifai/runners/utils/data_types/__pycache__/data_types.cpython-39.pyc +0 -0
  193. clarifai/runners/utils/data_types.py +471 -0
  194. clarifai/runners/utils/loader.py +24 -4
  195. clarifai/runners/utils/temp.py +59 -0
  196. clarifai/schema/__pycache__/search.cpython-311.pyc +0 -0
  197. clarifai/urls/__pycache__/helper.cpython-311.pyc +0 -0
  198. clarifai/urls/__pycache__/helper.cpython-39.pyc +0 -0
  199. clarifai/utils/__pycache__/__init__.cpython-311.pyc +0 -0
  200. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  201. clarifai/utils/__pycache__/cli.cpython-311.pyc +0 -0
  202. clarifai/utils/__pycache__/cli.cpython-39.pyc +0 -0
  203. clarifai/utils/__pycache__/config.cpython-311.pyc +0 -0
  204. clarifai/utils/__pycache__/config.cpython-39.pyc +0 -0
  205. clarifai/utils/__pycache__/constants.cpython-311.pyc +0 -0
  206. clarifai/utils/__pycache__/constants.cpython-39.pyc +0 -0
  207. clarifai/utils/__pycache__/logging.cpython-311.pyc +0 -0
  208. clarifai/utils/__pycache__/logging.cpython-39.pyc +0 -0
  209. clarifai/utils/__pycache__/misc.cpython-311.pyc +0 -0
  210. clarifai/utils/__pycache__/misc.cpython-39.pyc +0 -0
  211. clarifai/utils/__pycache__/model_train.cpython-311.pyc +0 -0
  212. clarifai/utils/__pycache__/protobuf.cpython-311.pyc +0 -0
  213. clarifai/utils/__pycache__/protobuf.cpython-39.pyc +0 -0
  214. clarifai/utils/__pycache__/secrets.cpython-311.pyc +0 -0
  215. clarifai/utils/evaluation/__pycache__/__init__.cpython-311.pyc +0 -0
  216. clarifai/utils/evaluation/__pycache__/helpers.cpython-311.pyc +0 -0
  217. clarifai/utils/evaluation/__pycache__/main.cpython-311.pyc +0 -0
  218. clarifai/utils/evaluation/__pycache__/testset_annotation_parser.cpython-311.pyc +0 -0
  219. clarifai/utils/misc.py +20 -0
  220. clarifai/utils/secrets.py +96 -1
  221. clarifai/workflows/__pycache__/__init__.cpython-311.pyc +0 -0
  222. clarifai/workflows/__pycache__/export.cpython-311.pyc +0 -0
  223. clarifai/workflows/__pycache__/utils.cpython-311.pyc +0 -0
  224. clarifai/workflows/__pycache__/validate.cpython-311.pyc +0 -0
  225. {clarifai-11.7.4.dist-info → clarifai-11.7.5rc1.dist-info}/METADATA +1 -1
  226. clarifai-11.7.5rc1.dist-info/RECORD +339 -0
  227. {clarifai-11.7.4.dist-info → clarifai-11.7.5rc1.dist-info}/WHEEL +1 -1
  228. clarifai-11.7.4.dist-info/RECORD +0 -129
  229. {clarifai-11.7.4.dist-info → clarifai-11.7.5rc1.dist-info}/entry_points.txt +0 -0
  230. {clarifai-11.7.4.dist-info → clarifai-11.7.5rc1.dist-info}/licenses/LICENSE +0 -0
  231. {clarifai-11.7.4.dist-info → clarifai-11.7.5rc1.dist-info}/top_level.txt +0 -0
clarifai/cli/model_templates.py ADDED
@@ -0,0 +1,243 @@
+ """Templates for model initialization."""
+
+ from clarifai import __version__
+
+
+ def get_model_class_template() -> str:
+     """Return the template for a basic ModelClass-based model."""
+     return '''from typing import Iterator, List
+ from clarifai.runners.models.model_class import ModelClass
+ from clarifai.runners.util.data_utils import Param
+
+ class MyModel(ModelClass):
+     """A custom model implementation using ModelClass."""
+
+     def load_model(self):
+         """Load the model here.
+         # TODO: please fill in
+         # Add your model loading logic here
+         """
+         pass
+
+     @ModelClass.method
+     def predict(
+         self,
+         prompt: str = "",
+         chat_history: List[dict] = None,
+         max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+         temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+         top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+     ) -> str:
+         """This is the method that will be called when the runner is run. It takes in an input and returns an output."""
+         # TODO: please fill in
+         # Implement your prediction logic here
+         pass # Replace with your actual logic
+
+     @ModelClass.method
+     def generate(
+         self,
+         prompt: str = "",
+         chat_history: List[dict] = None,
+         max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+         temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+         top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+     ) -> Iterator[str]:
+         """Example yielding a streamed response."""
+         # TODO: please fill in
+         # Implement your generation logic here
+         pass # Replace with your actual logic
+ '''
+
+
+ def get_mcp_model_class_template() -> str:
+     """Return the template for an MCPModelClass-based model."""
+     return '''from typing import Any
+
+ from fastmcp import FastMCP # use fastmcp v2 not the built in mcp
+ from pydantic import Field
+
+ from clarifai.runners.models.mcp_class import MCPModelClass
+
+ # TODO: please fill in
+ # Configure your FastMCP server
+ server = FastMCP("my-mcp-server", instructions="", stateless_http=True)
+
+
+ # TODO: please fill in
+ # Add your tools, resources, and prompts here
+ @server.tool("example_tool", description="An example tool")
+ def example_tool(input_param: Any = Field(description="Example input parameter")):
+     """Example tool implementation."""
+     # TODO: please fill in
+     # Implement your tool logic here
+     return f"Processed: {input_param}"
+
+
+ # Static resource example
+ @server.resource("config://version")
+ def get_version():
+     """Example static resource."""
+     # TODO: please fill in
+     # Return your resource data
+     return "1.0.0"
+
+
+ @server.prompt()
+ def example_prompt(text: str) -> str:
+     """Example prompt template."""
+     # TODO: please fill in
+     # Define your prompt template
+     return f"Process this text: {text}"
+
+
+ class MyModel(MCPModelClass):
+     """A custom model implementation using MCPModelClass."""
+
+     def get_server(self) -> FastMCP:
+         """Return the FastMCP server instance."""
+         return server
+ '''
+
+
+ def get_openai_model_class_template() -> str:
+     """Return the template for an OpenAIModelClass-based model."""
+     return '''from typing import List
+ from openai import OpenAI
+ from clarifai.runners.models.openai_class import OpenAIModelClass
+ from clarifai.runners.util.data_utils import Param
+ from clarifai.runners.utils.openai_convertor import build_openai_messages
+
+ class MyModel(OpenAIModelClass):
+     """A custom model implementation using OpenAIModelClass."""
+
+     # TODO: please fill in
+     # Configure your OpenAI-compatible client for local model
+     client = OpenAI(
+         api_key="local-key", # TODO: please fill in - use your local API key
+         base_url="http://localhost:8000/v1", # TODO: please fill in - your local model server endpoint
+     )
+
+     # TODO: please fill in
+     # Specify the model name to use
+     model = "my-local-model" # TODO: please fill in - replace with your local model name
+
+     def load_model(self):
+         """Optional: Add any additional model loading logic here."""
+         # TODO: please fill in (optional)
+         # Add any initialization logic if needed
+         pass
+
+     @OpenAIModelClass.method
+     def predict(
+         self,
+         prompt: str = "",
+         chat_history: List[dict] = None,
+         max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+         temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+         top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+     ) -> str:
+         """Run a single prompt completion using the OpenAI client."""
+         # TODO: please fill in
+         # Implement your prediction logic here
+         messages = build_openai_messages(prompt, chat_history)
+         response = self.client.chat.completions.create(
+             model=self.model,
+             messages=messages,
+             max_completion_tokens=max_tokens,
+             temperature=temperature,
+             top_p=top_p,
+         )
+         return response.choices[0].message.content
+
+     @OpenAIModelClass.method
+     def generate(
+         self,
+         prompt: str = "",
+         chat_history: List[dict] = None,
+         max_tokens: int = Param(default=256, description="The maximum number of tokens to generate. Shorter token lengths will provide faster performance."),
+         temperature: float = Param(default=1.0, description="A decimal number that determines the degree of randomness in the response"),
+         top_p: float = Param(default=1.0, description="An alternative to sampling with temperature, where the model considers the results of the tokens with top_p probability mass."),
+     ):
+         """Stream a completion response using the OpenAI client."""
+         # TODO: please fill in
+         # Implement your streaming logic here
+         messages = build_openai_messages(prompt, chat_history)
+         stream = self.client.chat.completions.create(
+             model=self.model,
+             messages=messages,
+             max_completion_tokens=max_tokens,
+             temperature=temperature,
+             top_p=top_p,
+             stream=True,
+         )
+         for chunk in stream:
+             if chunk.choices:
+                 text = (chunk.choices[0].delta.content
+                         if (chunk and chunk.choices[0].delta.content) is not None else '')
+                 yield text
+ '''
+
+
+ def get_config_template(model_type_id: str = "text-to-text") -> str:
+     """Return the template for config.yaml."""
+     return f'''# Configuration file for your Clarifai model
+
+ model:
+   id: "my-model" # TODO: please fill in - replace with your model ID
+   user_id: "user_id" # TODO: please fill in - replace with your user ID
+   app_id: "app_id" # TODO: please fill in - replace with your app ID
+   model_type_id: "{model_type_id}" # TODO: please fill in - replace if different model type ID
+
+ build_info:
+   python_version: "3.12"
+
+ # TODO: please fill in - adjust compute requirements for your model
+ inference_compute_info:
+   cpu_limit: "1" # TODO: please fill in - Amount of CPUs to use as a limit
+   cpu_memory: "1Gi" # TODO: please fill in - Amount of CPU memory to use as a limit
+   cpu_requests: "0.5" # TODO: please fill in - Amount of CPUs to use as a minimum
+   cpu_memory_requests: "512Mi" # TODO: please fill in - Amount of CPU memory to use as a minimum
+   num_accelerators: 1 # TODO: please fill in - Amount of GPU/TPUs to use
+   accelerator_type: ["NVIDIA-*"] # TODO: please fill in - type of accelerators requested
+   accelerator_memory: "1Gi" # TODO: please fill in - Amount of accelerator/GPU memory to use as a minimum
+
+ # TODO: please fill in (optional) - add checkpoints section if needed
+ # checkpoints:
+ #   type: "huggingface" # supported type
+ #   repo_id: "your-model-repo" # for huggingface
+ #   when: "build" # or "runtime", "upload"
+ '''
+
+
+ def get_requirements_template(model_type_id: str = None) -> str:
+     """Return the template for requirements.txt."""
+     requirements = f'''# Clarifai SDK - required
+ clarifai>={__version__}
+ '''
+     if model_type_id == "mcp":
+         requirements += "fastmcp\n"
+     elif model_type_id == "openai":
+         requirements += "openai\n"
+     requirements += '''
+ # TODO: please fill in - add your model's dependencies here
+ # Examples:
+ # torch>=2.0.0
+ # transformers>=4.30.0
+ # numpy>=1.21.0
+ # pillow>=9.0.0
+ '''
+     return requirements
+
+
+ # Mapping of model type IDs to their corresponding templates
+ MODEL_TYPE_TEMPLATES = {
+     "mcp": get_mcp_model_class_template,
+     "openai": get_openai_model_class_template,
+ }
+
+
+ def get_model_template(model_type_id: str = None) -> str:
+     """Get the appropriate model template based on model_type_id."""
+     if model_type_id in MODEL_TYPE_TEMPLATES:
+         return MODEL_TYPE_TEMPLATES[model_type_id]()
+     return get_model_class_template()
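
The expanded clarifai/cli/model.py diff (+264 -144) is not shown above, but it presumably wires these helpers into `clarifai model init`. A minimal sketch of calling the new helpers directly; the `my-model/1/model.py` directory layout is an assumption, not taken from this diff:

```python
# Sketch only: scaffold a model directory by hand using the new template helpers.
import os

from clarifai.cli.model_templates import (
    get_config_template,
    get_model_template,
    get_requirements_template,
)

model_dir = "my-model"  # illustrative path
os.makedirs(os.path.join(model_dir, "1"), exist_ok=True)

# "mcp" and "openai" select the specialized templates; anything else falls back
# to the plain ModelClass template via MODEL_TYPE_TEMPLATES.
with open(os.path.join(model_dir, "1", "model.py"), "w") as f:
    f.write(get_model_template("openai"))
with open(os.path.join(model_dir, "config.yaml"), "w") as f:
    f.write(get_config_template("text-to-text"))
with open(os.path.join(model_dir, "requirements.txt"), "w") as f:
    f.write(get_requirements_template("openai"))
```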
clarifai/cli/pipeline.py CHANGED
@@ -229,12 +229,34 @@ def init(pipeline_path):
      # Create the pipeline directory if it doesn't exist
      os.makedirs(pipeline_path, exist_ok=True)
 
+     # Prompt for user inputs
+     click.echo("Welcome to Clarifai Pipeline Initialization!")
+     click.echo("Please provide the following information:")
+
+     user_id = click.prompt("User ID", type=str)
+     app_id = click.prompt("App ID", type=str)
+     pipeline_id = click.prompt("Pipeline ID", default="hello-world-pipeline", type=str)
+     num_steps = click.prompt("Number of pipeline steps", default=2, type=int)
+
+     # Get step names
+     step_names = []
+     default_names = ["stepA", "stepB", "stepC", "stepD", "stepE", "stepF"]
+
+     for i in range(num_steps):
+         default_name = default_names[i] if i < len(default_names) else f"step{i + 1}"
+         step_name = click.prompt(f"Name for step {i + 1}", default=default_name, type=str)
+         step_names.append(step_name)
+
+     click.echo(f"\nCreating pipeline '{pipeline_id}' with steps: {', '.join(step_names)}")
+
      # Create pipeline config.yaml
      config_path = os.path.join(pipeline_path, "config.yaml")
      if os.path.exists(config_path):
          logger.warning(f"File {config_path} already exists, skipping...")
      else:
-         config_template = get_pipeline_config_template()
+         config_template = get_pipeline_config_template(
+             pipeline_id=pipeline_id, user_id=user_id, app_id=app_id, step_names=step_names
+         )
          with open(config_path, 'w', encoding='utf-8') as f:
              f.write(config_template)
          logger.info(f"Created {config_path}")
@@ -249,8 +271,8 @@ def init(pipeline_path):
              f.write(readme_template)
          logger.info(f"Created {readme_path}")
 
-     # Create pipeline steps (stepA and stepB)
-     for step_id in ["stepA", "stepB"]:
+     # Create pipeline steps
+     for step_id in step_names:
          step_dir = os.path.join(pipeline_path, step_id)
          os.makedirs(step_dir, exist_ok=True)
 
@@ -263,7 +285,9 @@ def init(pipeline_path):
          if os.path.exists(step_config_path):
              logger.warning(f"File {step_config_path} already exists, skipping...")
          else:
-             step_config_template = get_pipeline_step_config_template(step_id)
+             step_config_template = get_pipeline_step_config_template(
+                 step_id=step_id, user_id=user_id, app_id=app_id
+             )
              with open(step_config_path, 'w', encoding='utf-8') as f:
                  f.write(step_config_template)
              logger.info(f"Created {step_config_path}")
@@ -290,13 +314,9 @@ def init(pipeline_path):
 
      logger.info(f"Pipeline initialization complete in {pipeline_path}")
      logger.info("Next steps:")
-     logger.info("1. Search for '# TODO: please fill in' comments in the generated files")
-     logger.info("2. Update your user_id and app_id in all config.yaml files")
-     logger.info(
-         "3. Implement your pipeline step logic in stepA/1/pipeline_step.py and stepB/1/pipeline_step.py"
-     )
-     logger.info("4. Add dependencies to requirements.txt files as needed")
-     logger.info("5. Run 'clarifai pipeline upload config.yaml' to upload your pipeline")
+     logger.info("1. Implement your pipeline step logic in the generated pipeline_step.py files")
+     logger.info("2. Add dependencies to requirements.txt files as needed")
+     logger.info("3. Run 'clarifai pipeline upload config.yaml' to upload your pipeline")
 
 
  @pipeline.command(['ls'])
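
The interactive `init` above ships only six lettered defaults; a small illustration of the fallback naming for larger pipelines (the values here are hypothetical):

```python
# Illustration of the default step-name fallback used by the new interactive init:
# the first six steps get lettered defaults, later ones fall back to step<N>.
default_names = ["stepA", "stepB", "stepC", "stepD", "stepE", "stepF"]
num_steps = 8
step_names = [
    default_names[i] if i < len(default_names) else f"step{i + 1}"
    for i in range(num_steps)
]
print(step_names)
# ['stepA', 'stepB', 'stepC', 'stepD', 'stepE', 'stepF', 'step7', 'step8']
```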
clarifai/cli/pipeline_step_templates.py ADDED
@@ -0,0 +1,64 @@
+ """Templates for initializing pipeline step projects."""
+
+ from clarifai.versions import CLIENT_VERSION
+
+
+ def get_config_template():
+     """Get the config.yaml template for pipeline steps."""
+     return """pipeline_step:
+   id: "text-classifier-train-upload-step" # TODO: please fill in
+   user_id: "your_user_id" # TODO: please fill in
+   app_id: "your_app_id" # TODO: please fill in
+
+ pipeline_step_input_params:
+   - name: param_a
+   - name: param_b
+     default: "param_b_allowed_value1"
+     description: "param_b is the second parameter of the pipeline step"
+     accepted_values: # list of accepted values for param_b
+       - "param_b_allowed_value1"
+       - "param_b_allowed_value2"
+       - "param_b_allowed_value3"
+
+ build_info:
+   python_version: "3.12"
+
+ pipeline_step_compute_info:
+   cpu_limit: "500m"
+   cpu_memory: "500Mi"
+   num_accelerators: 0
+ """
+
+
+ def get_pipeline_step_template():
+     """Get the pipeline_step.py template."""
+     return '''import argparse
+
+ import clarifai
+
+
+ def main():
+     parser = argparse.ArgumentParser(description='Concatenate two strings.')
+     parser.add_argument('--param_a', type=str, required=True, help='First string to concatenate')
+     parser.add_argument('--param_b', type=str, required=True, help='Second string to concatenate')
+
+     args = parser.parse_args()
+
+     print(clarifai.__version__)
+
+     print(f"Concatenation Output: {args.param_a + args.param_b}")
+
+
+ if __name__ == "__main__":
+     main()
+ '''
+
+
+ def get_requirements_template():
+     """Get the requirements.txt template."""
+     return f'''clarifai=={CLIENT_VERSION}
+ # Add your pipeline step dependencies here
+ # Example:
+ # torch>=1.9.0
+ # transformers>=4.20.0
+ '''
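
Since the generated pipeline_step.py is a plain argparse script, it can be smoke-tested locally before uploading; a sketch of such a check, with illustrative paths and argument values:

```python
# Sketch only: run a generated step script locally to verify it parses its
# parameters and produces the expected concatenation output.
import subprocess
import sys

subprocess.run(
    [sys.executable, "1/pipeline_step.py", "--param_a", "Hello, ", "--param_b", "world"],
    check=True,
)
# The template prints the installed clarifai version followed by:
# "Concatenation Output: Hello, world"
```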
clarifai/cli/templates/pipeline_templates.py CHANGED
@@ -3,43 +3,55 @@
  from clarifai.versions import CLIENT_VERSION
 
 
- def get_pipeline_config_template():
+ def get_pipeline_config_template(
+     pipeline_id="hello-world-pipeline",
+     user_id="your_user_id",
+     app_id="your_app_id",
+     step_names=None,
+ ):
      """Get the config.yaml template for pipelines."""
-     return """pipeline:
-   id: "hello-world-pipeline" # TODO: please fill in
-   user_id: "your_user_id" # TODO: please fill in
-   app_id: "your_app_id" # TODO: please fill in
+     if step_names is None:
+         step_names = ["stepA", "stepB"]
+
+     step_directories = "\n".join(f"  - {step}" for step in step_names)
+
+     # Generate step templates for orchestration
+     step_templates = []
+     for i, step_name in enumerate(step_names):
+         step_templates.append(f"""          - - name: step-{i}
+               templateRef:
+                 name: users/{user_id}/apps/{app_id}/pipeline_steps/{step_name}
+                 template: users/{user_id}/apps/{app_id}/pipeline_steps/{step_name}""")
+
+     steps_yaml = "\n".join(step_templates)
+
+     return f"""pipeline:
+   id: "{pipeline_id}"
+   user_id: "{user_id}"
+   app_id: "{app_id}"
    step_directories:
-   - stepA
-   - stepB
+ {step_directories}
    orchestration_spec:
      argo_orchestration_spec: |
        apiVersion: argoproj.io/v1alpha1
        kind: Workflow
        metadata:
-         generateName: hello-world-pipeline-
+         generateName: {pipeline_id}-
        spec:
          entrypoint: sequence
          templates:
          - name: sequence
            steps:
-           - - name: step-a
-               templateRef:
-                 name: users/your_user_id/apps/your_app_id/pipeline_steps/stepA # TODO: please fill in
-                 template: users/your_user_id/apps/your_app_id/pipeline_steps/stepA # TODO: please fill in
-           - - name: step-b
-               templateRef:
-                 name: users/your_user_id/apps/your_app_id/pipeline_steps/stepB # TODO: please fill in
-                 template: users/your_user_id/apps/your_app_id/pipeline_steps/stepB # TODO: please fill in
+ {steps_yaml}
  """
 
 
- def get_pipeline_step_config_template(step_id: str):
+ def get_pipeline_step_config_template(step_id: str, user_id="your_user_id", app_id="your_app_id"):
      """Get the config.yaml template for a pipeline step."""
      return f"""pipeline_step:
-   id: "{step_id}" # TODO: please fill in
-   user_id: "your_user_id" # TODO: please fill in
-   app_id: "your_app_id" # TODO: please fill in
+   id: "{step_id}"
+   user_id: "{user_id}"
+   app_id: "{app_id}"
 
  pipeline_step_input_params:
    - name: input_text
@@ -114,17 +126,11 @@ This project contains a Clarifai pipeline with associated pipeline steps.
 
  ## Getting Started
 
- 1. **Configure the pipeline**: Edit `config.yaml` and update the TODO fields:
-    - Set your `user_id` and `app_id`
-    - Update the pipeline `id`
-    - Modify the Argo orchestration spec as needed
-
- 2. **Configure pipeline steps**: For each step directory (stepA, stepB):
-    - Edit `config.yaml` and fill in the TODO fields
+ 1. **Implement pipeline steps**: For each step directory:
     - Update `requirements.txt` with your dependencies
    - Implement your logic in `1/pipeline_step.py`
 
- 3. **Upload the pipeline**:
+ 2. **Upload the pipeline**:
     ```bash
     clarifai pipeline upload config.yaml
     ```
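
A short sketch of rendering the now-parameterized templates; the user_id/app_id values below are placeholders for illustration:

```python
# Sketch only: render the pipeline and step configs with concrete IDs so that
# no "# TODO: please fill in" edits remain in the generated YAML.
from clarifai.cli.templates.pipeline_templates import (
    get_pipeline_config_template,
    get_pipeline_step_config_template,
)

config_yaml = get_pipeline_config_template(
    pipeline_id="hello-world-pipeline",
    user_id="alice",
    app_id="demo-app",
    step_names=["stepA", "stepB"],
)
step_yaml = get_pipeline_step_config_template("stepA", user_id="alice", app_id="demo-app")

# The rendered pipeline config embeds templateRefs such as
# users/alice/apps/demo-app/pipeline_steps/stepA for each named step.
print(config_yaml)
print(step_yaml)
```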
clarifai/client/pipeline.py CHANGED
@@ -193,6 +193,7 @@ class Pipeline(Lister, BaseClient):
          """
          start_time = time.time()
          seen_logs = set()
+         current_page = 1  # Track current page for log pagination.
 
          while time.time() - start_time < timeout:
              # Get run status
@@ -217,8 +218,8 @@ class Pipeline(Lister, BaseClient):
                  pipeline_run, preserving_proto_field_name=True
              )
 
-             # Display new log entries
-             self._display_new_logs(run_id, seen_logs)
+             # Display new log entries and update current page
+             current_page = self._display_new_logs(run_id, seen_logs, current_page)
 
              elapsed_time = time.time() - start_time
              logger.info(f"Pipeline run monitoring... (elapsed {elapsed_time:.1f}s)")
@@ -276,12 +277,16 @@ class Pipeline(Lister, BaseClient):
          logger.error(f"Pipeline run timed out after {timeout} seconds")
          return {"status": "timeout"}
 
-     def _display_new_logs(self, run_id: str, seen_logs: set):
+     def _display_new_logs(self, run_id: str, seen_logs: set, current_page: int = 1) -> int:
          """Display new log entries for a pipeline version run.
 
          Args:
              run_id (str): The pipeline version run ID.
              seen_logs (set): Set of already seen log entry IDs.
+             current_page (int): The current page to fetch logs from.
+
+         Returns:
+             int: The next page number to fetch from in subsequent calls.
          """
          try:
              logs_request = service_pb2.ListLogEntriesRequest()
@@ -290,7 +295,7 @@ class Pipeline(Lister, BaseClient):
              logs_request.pipeline_version_id = self.pipeline_version_id or ""
              logs_request.pipeline_version_run_id = run_id
              logs_request.log_type = "pipeline.version.run" # Set required log type
-             logs_request.page = 1
+             logs_request.page = current_page
              logs_request.per_page = 50
 
              logs_response = self.STUB.ListLogEntries(
@@ -298,7 +303,9 @@ class Pipeline(Lister, BaseClient):
              )
 
              if logs_response.status.code == status_code_pb2.StatusCode.SUCCESS:
+                 entries_count = 0
                  for log_entry in logs_response.log_entries:
+                     entries_count += 1
                      # Use log entry URL or timestamp as unique identifier
                      log_id = log_entry.url or f"{log_entry.created_at.seconds}_{log_entry.message}"
                      if log_id not in seen_logs:
@@ -312,5 +319,14 @@ class Pipeline(Lister, BaseClient):
                          else:
                              logger.info(log_message)
 
+                 # If we got a full page (50 entries), there might be more logs on the next page
+                 # If we got fewer than 50 entries, we've reached the end and should stay on current page
+                 if entries_count == 50:
+                     return current_page + 1
+                 else:
+                     return current_page
+
          except Exception as e:
              logger.debug(f"Error fetching logs: {e}")
+             # Return current page on error to retry the same page next fetch
+             return current_page
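
The page-cursor rule added above, distilled into a standalone form for clarity (per_page mirrors the hard-coded 50):

```python
def next_page(entries_count: int, current_page: int, per_page: int = 50) -> int:
    """Distilled from _display_new_logs: a full page may have more entries behind
    it, so advance the cursor; a partial page means we are caught up, so re-read
    the same page on the next poll."""
    return current_page + 1 if entries_count == per_page else current_page
```

One consequence worth noting: on partial pages the same page is fetched again on every poll, and the seen_logs set deduplicates entries that were already printed, so only genuinely new log lines are emitted.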