clarifai 11.5.5.tar.gz → 11.6.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. {clarifai-11.5.5/clarifai.egg-info → clarifai-11.6.0}/PKG-INFO +2 -2
  2. clarifai-11.6.0/clarifai/__init__.py +1 -0
  3. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/model.py +10 -2
  4. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/pipeline.py +140 -0
  5. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/__init__.py +2 -0
  6. clarifai-11.6.0/clarifai/client/pipeline.py +312 -0
  7. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/model_builder.py +29 -4
  8. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/model_class.py +24 -9
  9. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/openai_class.py +1 -0
  10. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/server.py +1 -1
  11. {clarifai-11.5.5 → clarifai-11.6.0/clarifai.egg-info}/PKG-INFO +2 -2
  12. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai.egg-info/SOURCES.txt +2 -0
  13. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai.egg-info/requires.txt +1 -1
  14. {clarifai-11.5.5 → clarifai-11.6.0}/requirements.txt +1 -1
  15. clarifai-11.6.0/tests/test_pipeline_client.py +262 -0
  16. clarifai-11.5.5/clarifai/__init__.py +0 -1
  17. {clarifai-11.5.5 → clarifai-11.6.0}/LICENSE +0 -0
  18. {clarifai-11.5.5 → clarifai-11.6.0}/MANIFEST.in +0 -0
  19. {clarifai-11.5.5 → clarifai-11.6.0}/README.md +0 -0
  20. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/README.md +0 -0
  21. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/__init__.py +0 -0
  22. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/__main__.py +0 -0
  23. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/base.py +0 -0
  24. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/compute_cluster.py +0 -0
  25. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/deployment.py +0 -0
  26. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/nodepool.py +0 -0
  27. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/pipeline_step.py +0 -0
  28. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/templates/__init__.py +0 -0
  29. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/templates/model_templates.py +0 -0
  30. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/templates/pipeline_step_templates.py +0 -0
  31. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli/templates/pipeline_templates.py +0 -0
  32. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/cli.py +0 -0
  33. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/app.py +0 -0
  34. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/auth/__init__.py +0 -0
  35. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/auth/helper.py +0 -0
  36. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/auth/register.py +0 -0
  37. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/auth/stub.py +0 -0
  38. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/base.py +0 -0
  39. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/compute_cluster.py +0 -0
  40. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/dataset.py +0 -0
  41. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/deployment.py +0 -0
  42. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/input.py +0 -0
  43. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/lister.py +0 -0
  44. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/model.py +0 -0
  45. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/model_client.py +0 -0
  46. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/module.py +0 -0
  47. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/nodepool.py +0 -0
  48. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/runner.py +0 -0
  49. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/search.py +0 -0
  50. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/user.py +0 -0
  51. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/client/workflow.py +0 -0
  52. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/base.py +0 -0
  53. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/dataset.py +0 -0
  54. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/input.py +0 -0
  55. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/model.py +0 -0
  56. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/rag.py +0 -0
  57. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/search.py +0 -0
  58. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/constants/workflow.py +0 -0
  59. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/__init__.py +0 -0
  60. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/export/__init__.py +0 -0
  61. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/export/inputs_annotations.py +0 -0
  62. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/__init__.py +0 -0
  63. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/base.py +0 -0
  64. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/features.py +0 -0
  65. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/image.py +0 -0
  66. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/loaders/README.md +0 -0
  67. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/loaders/__init__.py +0 -0
  68. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/loaders/coco_captions.py +0 -0
  69. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/loaders/coco_detection.py +0 -0
  70. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/loaders/imagenet_classification.py +0 -0
  71. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/loaders/xview_detection.py +0 -0
  72. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/multimodal.py +0 -0
  73. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/text.py +0 -0
  74. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/datasets/upload/utils.py +0 -0
  75. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/errors.py +0 -0
  76. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/models/__init__.py +0 -0
  77. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/models/api.py +0 -0
  78. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/modules/README.md +0 -0
  79. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/modules/__init__.py +0 -0
  80. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/modules/css.py +0 -0
  81. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/modules/pages.py +0 -0
  82. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/modules/style.css +0 -0
  83. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/rag/__init__.py +0 -0
  84. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/rag/rag.py +0 -0
  85. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/rag/utils.py +0 -0
  86. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/__init__.py +0 -0
  87. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/dockerfile_template/Dockerfile.template +0 -0
  88. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/__init__.py +0 -0
  89. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/dummy_openai_model.py +0 -0
  90. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/mcp_class.py +0 -0
  91. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/model_run_locally.py +0 -0
  92. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/model_runner.py +0 -0
  93. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/model_servicer.py +0 -0
  94. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/visual_classifier_class.py +0 -0
  95. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/models/visual_detector_class.py +0 -0
  96. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/pipeline_steps/__init__.py +0 -0
  97. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/pipeline_steps/pipeline_step_builder.py +0 -0
  98. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/pipelines/__init__.py +0 -0
  99. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/pipelines/pipeline_builder.py +0 -0
  100. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/__init__.py +0 -0
  101. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/code_script.py +0 -0
  102. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/const.py +0 -0
  103. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/data_types/__init__.py +0 -0
  104. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/data_types/data_types.py +0 -0
  105. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/data_utils.py +0 -0
  106. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/loader.py +0 -0
  107. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/method_signatures.py +0 -0
  108. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/model_utils.py +0 -0
  109. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/openai_convertor.py +0 -0
  110. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/pipeline_validation.py +0 -0
  111. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/serializers.py +0 -0
  112. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/runners/utils/url_fetcher.py +0 -0
  113. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/schema/search.py +0 -0
  114. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/urls/helper.py +0 -0
  115. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/__init__.py +0 -0
  116. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/cli.py +0 -0
  117. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/config.py +0 -0
  118. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/constants.py +0 -0
  119. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/evaluation/__init__.py +0 -0
  120. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/evaluation/helpers.py +0 -0
  121. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/evaluation/main.py +0 -0
  122. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/evaluation/testset_annotation_parser.py +0 -0
  123. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/logging.py +0 -0
  124. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/misc.py +0 -0
  125. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/model_train.py +0 -0
  126. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/utils/protobuf.py +0 -0
  127. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/versions.py +0 -0
  128. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/workflows/__init__.py +0 -0
  129. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/workflows/export.py +0 -0
  130. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/workflows/utils.py +0 -0
  131. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai/workflows/validate.py +0 -0
  132. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai.egg-info/dependency_links.txt +0 -0
  133. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai.egg-info/entry_points.txt +0 -0
  134. {clarifai-11.5.5 → clarifai-11.6.0}/clarifai.egg-info/top_level.txt +0 -0
  135. {clarifai-11.5.5 → clarifai-11.6.0}/pyproject.toml +0 -0
  136. {clarifai-11.5.5 → clarifai-11.6.0}/setup.cfg +0 -0
  137. {clarifai-11.5.5 → clarifai-11.6.0}/setup.py +0 -0
  138. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_app.py +0 -0
  139. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_async_stub.py +0 -0
  140. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_auth.py +0 -0
  141. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_data_upload.py +0 -0
  142. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_eval.py +0 -0
  143. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_list_models.py +0 -0
  144. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_misc.py +0 -0
  145. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_model_predict.py +0 -0
  146. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_model_train.py +0 -0
  147. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_modules.py +0 -0
  148. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_rag.py +0 -0
  149. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_search.py +0 -0
  150. {clarifai-11.5.5 → clarifai-11.6.0}/tests/test_stub.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: clarifai
-Version: 11.5.5
+Version: 11.6.0
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
 Author-email: support@clarifai.com
@@ -20,7 +20,7 @@ Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: clarifai-grpc>=11.5.5
-Requires-Dist: clarifai-protocol>=0.0.24
+Requires-Dist: clarifai-protocol>=0.0.25
 Requires-Dist: numpy>=1.22.0
 Requires-Dist: tqdm>=4.65.0
 Requires-Dist: PyYAML>=6.0.1

clarifai/__init__.py (new file)
@@ -0,0 +1 @@
+__version__ = "11.6.0"

clarifai/cli/model.py
@@ -120,14 +120,22 @@ def init(model_path, model_type_id):
     is_flag=True,
     help='Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile.',
 )
-def upload(model_path, stage, skip_dockerfile):
+@click.pass_context
+def upload(ctx, model_path, stage, skip_dockerfile):
     """Upload a model to Clarifai.
 
     MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
     """
     from clarifai.runners.models.model_builder import upload_model
 
-    upload_model(model_path, stage, skip_dockerfile)
+    validate_context(ctx)
+    upload_model(
+        model_path,
+        stage,
+        skip_dockerfile,
+        pat=ctx.obj.current.pat,
+        base_url=ctx.obj.current.api_base,
+    )
 
 
 @model.command()

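With this change, `clarifai model upload` passes the PAT and API base from the active CLI context down to the builder instead of relying only on environment variables. A minimal sketch of the equivalent programmatic call (the folder path and credential values below are placeholders, not values from this release):

from clarifai.runners.models.model_builder import upload_model

# pat/base_url are optional; when omitted, the builder falls back to the
# CLARIFAI_PAT / CLARIFAI_API_BASE environment variables as before.
upload_model(
    "path/to/my_model",     # folder with model.py, config.yaml, requirements.txt
    stage="upload",         # download checkpoints configured for the "upload" stage
    skip_dockerfile=False,  # generate a Dockerfile unless you hand-edit one
    pat="YOUR_PAT_HERE",
    base_url="https://api.clarifai.com",
)
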
clarifai/cli/pipeline.py
@@ -27,6 +27,146 @@ def upload(path):
     upload_pipeline(path)
 
 
+@pipeline.command()
+@click.option(
+    '--config',
+    type=click.Path(exists=True),
+    required=False,
+    help='Path to the pipeline run config file.',
+)
+@click.option('--pipeline_id', required=False, help='Pipeline ID to run.')
+@click.option('--pipeline_version_id', required=False, help='Pipeline Version ID to run.')
+@click.option(
+    '--pipeline_version_run_id',
+    required=False,
+    help='Pipeline Version Run ID. If not provided, a UUID will be generated.',
+)
+@click.option('--user_id', required=False, help='User ID of the pipeline.')
+@click.option('--app_id', required=False, help='App ID that contains the pipeline.')
+@click.option('--nodepool_id', required=False, help='Nodepool ID to run the pipeline on.')
+@click.option(
+    '--compute_cluster_id', required=False, help='Compute Cluster ID to run the pipeline on.'
+)
+@click.option('--pipeline_url', required=False, help='Pipeline URL to run.')
+@click.option(
+    '--timeout',
+    type=int,
+    default=3600,
+    help='Maximum time to wait for completion in seconds. Default 3600 (1 hour).',
+)
+@click.option(
+    '--monitor_interval',
+    type=int,
+    default=10,
+    help='Interval between status checks in seconds. Default 10.',
+)
+@click.option(
+    '--log_file',
+    type=click.Path(),
+    required=False,
+    help='Path to file where logs should be written. If not provided, logs are displayed on console.',
+)
+@click.option(
+    '--monitor',
+    is_flag=True,
+    default=False,
+    help='Monitor an existing pipeline run instead of starting a new one. Requires pipeline_version_run_id.',
+)
+@click.pass_context
+def run(
+    ctx,
+    config,
+    pipeline_id,
+    pipeline_version_id,
+    pipeline_version_run_id,
+    user_id,
+    app_id,
+    nodepool_id,
+    compute_cluster_id,
+    pipeline_url,
+    timeout,
+    monitor_interval,
+    log_file,
+    monitor,
+):
+    """Run a pipeline and monitor its progress."""
+    import json
+
+    from clarifai.client.pipeline import Pipeline
+    from clarifai.utils.cli import from_yaml, validate_context
+
+    validate_context(ctx)
+
+    if config:
+        config_data = from_yaml(config)
+        pipeline_id = config_data.get('pipeline_id', pipeline_id)
+        pipeline_version_id = config_data.get('pipeline_version_id', pipeline_version_id)
+        pipeline_version_run_id = config_data.get(
+            'pipeline_version_run_id', pipeline_version_run_id
+        )
+        user_id = config_data.get('user_id', user_id)
+        app_id = config_data.get('app_id', app_id)
+        nodepool_id = config_data.get('nodepool_id', nodepool_id)
+        compute_cluster_id = config_data.get('compute_cluster_id', compute_cluster_id)
+        pipeline_url = config_data.get('pipeline_url', pipeline_url)
+        timeout = config_data.get('timeout', timeout)
+        monitor_interval = config_data.get('monitor_interval', monitor_interval)
+        log_file = config_data.get('log_file', log_file)
+        monitor = config_data.get('monitor', monitor)
+
+    # compute_cluster_id and nodepool_id are mandatory regardless of whether pipeline_url is provided
+    if not compute_cluster_id or not nodepool_id:
+        raise ValueError("--compute_cluster_id and --nodepool_id are mandatory parameters.")
+
+    # When monitor flag is used, pipeline_version_run_id is mandatory
+    if monitor and not pipeline_version_run_id:
+        raise ValueError("--pipeline_version_run_id is required when using --monitor flag.")
+
+    if pipeline_url:
+        # When using pipeline_url, other parameters are optional (will be parsed from URL)
+        required_params_provided = True
+    else:
+        # When not using pipeline_url, all individual parameters are required
+        required_params_provided = all([pipeline_id, user_id, app_id, pipeline_version_id])
+
+    if not required_params_provided:
+        raise ValueError(
+            "Either --user_id & --app_id & --pipeline_id & --pipeline_version_id or --pipeline_url must be provided."
+        )
+
+    if pipeline_url:
+        pipeline = Pipeline(
+            url=pipeline_url,
+            pat=ctx.obj.current.pat,
+            base_url=ctx.obj.current.api_base,
+            pipeline_version_run_id=pipeline_version_run_id,
+            nodepool_id=nodepool_id,
+            compute_cluster_id=compute_cluster_id,
+            log_file=log_file,
+        )
+    else:
+        pipeline = Pipeline(
+            pipeline_id=pipeline_id,
+            pipeline_version_id=pipeline_version_id,
+            pipeline_version_run_id=pipeline_version_run_id,
+            user_id=user_id,
+            app_id=app_id,
+            nodepool_id=nodepool_id,
+            compute_cluster_id=compute_cluster_id,
+            pat=ctx.obj.current.pat,
+            base_url=ctx.obj.current.api_base,
+            log_file=log_file,
+        )
+
+    if monitor:
+        # Monitor existing pipeline run instead of starting new one
+        result = pipeline.monitor_only(timeout=timeout, monitor_interval=monitor_interval)
+    else:
+        # Start new pipeline run and monitor it
+        result = pipeline.run(timeout=timeout, monitor_interval=monitor_interval)
+    click.echo(json.dumps(result, indent=2, default=str))
+
+
 @pipeline.command()
 @click.argument(
     "pipeline_path",

clarifai/client/__init__.py
@@ -7,6 +7,7 @@ from clarifai.client.input import Inputs
 from clarifai.client.lister import Lister
 from clarifai.client.model import Model
 from clarifai.client.module import Module
+from clarifai.client.pipeline import Pipeline
 from clarifai.client.search import Search
 from clarifai.client.user import User
 from clarifai.client.workflow import Workflow
@@ -18,6 +19,7 @@ __all__ = [
     'App',
     'Model',
     'Workflow',
+    'Pipeline',
     'Module',
     'Lister',
     'Dataset',

clarifai/client/pipeline.py (new file)
@@ -0,0 +1,312 @@
+import time
+import uuid
+from typing import Dict, List
+
+from clarifai_grpc.grpc.api import resources_pb2, service_pb2
+from clarifai_grpc.grpc.api.status import status_code_pb2
+
+from clarifai.client.base import BaseClient
+from clarifai.client.lister import Lister
+from clarifai.errors import UserError
+from clarifai.urls.helper import ClarifaiUrlHelper
+from clarifai.utils.constants import DEFAULT_BASE
+from clarifai.utils.logging import logger
+
+
+def _get_status_name(status_code: int) -> str:
+    """Get the human-readable name for a status code."""
+    status_mapping = {
+        # Job status codes (these are the actual values based on the error message showing 64001)
+        64001: "JOB_QUEUED",
+        64002: "JOB_RUNNING",
+        64003: "JOB_COMPLETED",
+        64004: "JOB_FAILED",
+        64005: "JOB_UNEXPECTED_ERROR",
+        # Standard status codes
+        10000: "SUCCESS",
+        10010: "MIXED_STATUS",
+    }
+    return status_mapping.get(status_code, f"UNKNOWN_STATUS_{status_code}")
+
+
+class Pipeline(Lister, BaseClient):
+    """Pipeline is a class that provides access to Clarifai API endpoints related to Pipeline information."""
+
+    def __init__(
+        self,
+        url: str = None,
+        pipeline_id: str = None,
+        pipeline_version_id: str = None,
+        pipeline_version_run_id: str = None,
+        user_id: str = None,
+        app_id: str = None,
+        nodepool_id: str = None,
+        compute_cluster_id: str = None,
+        log_file: str = None,
+        base_url: str = DEFAULT_BASE,
+        pat: str = None,
+        token: str = None,
+        root_certificates_path: str = None,
+        **kwargs,
+    ):
+        """Initializes a Pipeline object.
+
+        Args:
+            url (str): The URL to initialize the pipeline object.
+            pipeline_id (str): The Pipeline ID to interact with.
+            pipeline_version_id (str): The Pipeline Version ID to interact with.
+            pipeline_version_run_id (str): The Pipeline Version Run ID. If not provided, a UUID will be generated.
+            user_id (str): The User ID that owns the pipeline.
+            app_id (str): The App ID that contains the pipeline.
+            nodepool_id (str): The Nodepool ID to run the pipeline on.
+            compute_cluster_id (str): The Compute Cluster ID to run the pipeline on.
+            log_file (str): Path to file where logs should be written. If not provided, logs are displayed on console.
+            base_url (str): Base API url. Default "https://api.clarifai.com"
+            pat (str): A personal access token for authentication. Can be set as env var CLARIFAI_PAT
+            token (str): A session token for authentication. Accepts either a session token or a pat. Can be set as env var CLARIFAI_SESSION_TOKEN
+            root_certificates_path (str): Path to the SSL root certificates file, used to establish secure gRPC connections.
+            **kwargs: Additional keyword arguments to be passed to the Pipeline.
+        """
+        if url and pipeline_id:
+            raise UserError("You can only specify one of url or pipeline_id.")
+        if not url and not pipeline_id:
+            raise UserError("You must specify one of url or pipeline_id.")
+        if url:
+            parsed_user_id, parsed_app_id, _, parsed_pipeline_id, parsed_version_id = (
+                ClarifaiUrlHelper.split_clarifai_url(url)
+            )
+            user_id = user_id or parsed_user_id
+            app_id = app_id or parsed_app_id
+            pipeline_id = parsed_pipeline_id
+            pipeline_version_id = pipeline_version_id or parsed_version_id
+
+        self.pipeline_id = pipeline_id
+        self.pipeline_version_id = pipeline_version_id
+        self.pipeline_version_run_id = pipeline_version_run_id or str(uuid.uuid4())
+        self.user_id = user_id
+        self.app_id = app_id
+        self.nodepool_id = nodepool_id
+        self.compute_cluster_id = compute_cluster_id
+        self.log_file = log_file
+
+        BaseClient.__init__(
+            self,
+            user_id=user_id,
+            app_id=app_id,
+            base=base_url,
+            pat=pat,
+            token=token,
+            root_certificates_path=root_certificates_path,
+        )
+        Lister.__init__(self)
+
+        # Set up runner selector if compute cluster and nodepool are provided
+        self._runner_selector = None
+        if self.compute_cluster_id and self.nodepool_id:
+            from clarifai.client.nodepool import Nodepool
+
+            self._runner_selector = Nodepool.get_runner_selector(
+                user_id=self.user_id,
+                compute_cluster_id=self.compute_cluster_id,
+                nodepool_id=self.nodepool_id,
+            )
+
+    def run(self, inputs: List = None, timeout: int = 3600, monitor_interval: int = 10) -> Dict:
+        """Run the pipeline and monitor its progress.
+
+        Args:
+            inputs (List): List of inputs to run the pipeline with. If None, runs without inputs.
+            timeout (int): Maximum time to wait for completion in seconds. Default 3600 (1 hour).
+            monitor_interval (int): Interval between status checks in seconds. Default 10.
+
+        Returns:
+            Dict: The pipeline run result.
+        """
+        # Create a new pipeline version run
+        pipeline_version_run = resources_pb2.PipelineVersionRun()
+        pipeline_version_run.id = self.pipeline_version_run_id
+
+        # Set nodepools if nodepool information is available
+        if self.nodepool_id and self.compute_cluster_id:
+            nodepool = resources_pb2.Nodepool(
+                id=self.nodepool_id,
+                compute_cluster=resources_pb2.ComputeCluster(
+                    id=self.compute_cluster_id, user_id=self.user_id
+                ),
+            )
+            pipeline_version_run.nodepools.extend([nodepool])
+
+        run_request = service_pb2.PostPipelineVersionRunsRequest()
+        run_request.user_app_id.CopyFrom(self.user_app_id)
+        run_request.pipeline_id = self.pipeline_id
+        run_request.pipeline_version_id = self.pipeline_version_id or ""
+        run_request.pipeline_version_runs.append(pipeline_version_run)
+
+        # Add runner selector if available
+        if self._runner_selector:
+            run_request.runner_selector.CopyFrom(self._runner_selector)
+
+        logger.info(f"Starting pipeline run for pipeline {self.pipeline_id}")
+        response = self.STUB.PostPipelineVersionRuns(
+            run_request, metadata=self.auth_helper.metadata
+        )
+
+        if response.status.code != status_code_pb2.StatusCode.SUCCESS:
+            raise UserError(
+                f"Failed to start pipeline run: {response.status.description}. Details: {response.status.details}"
+            )
+
+        if not response.pipeline_version_runs:
+            raise UserError("No pipeline version run was created")
+
+        pipeline_version_run = response.pipeline_version_runs[0]
+        run_id = pipeline_version_run.id or self.pipeline_version_run_id
+
+        logger.info(f"Pipeline version run created with ID: {run_id}")
+
+        # Monitor the run
+        return self._monitor_pipeline_run(run_id, timeout, monitor_interval)
+
+    def monitor_only(self, timeout: int = 3600, monitor_interval: int = 10) -> Dict:
+        """Monitor an existing pipeline run without starting a new one.
+
+        Args:
+            timeout (int): Maximum time to wait for completion in seconds. Default 3600 (1 hour).
+            monitor_interval (int): Interval between status checks in seconds. Default 10.
+
+        Returns:
+            Dict: The pipeline run result.
+        """
+        if not self.pipeline_version_run_id:
+            raise UserError("pipeline_version_run_id is required for monitoring existing runs")
+
+        logger.info(f"Monitoring existing pipeline run with ID: {self.pipeline_version_run_id}")
+
+        # Monitor the existing run
+        return self._monitor_pipeline_run(self.pipeline_version_run_id, timeout, monitor_interval)
+
+    def _monitor_pipeline_run(self, run_id: str, timeout: int, monitor_interval: int) -> Dict:
+        """Monitor a pipeline version run until completion.
+
+        Args:
+            run_id (str): The pipeline version run ID to monitor.
+            timeout (int): Maximum time to wait for completion in seconds.
+            monitor_interval (int): Interval between status checks in seconds.
+
+        Returns:
+            Dict: The pipeline run result.
+        """
+        start_time = time.time()
+        seen_logs = set()
+
+        while time.time() - start_time < timeout:
+            # Get run status
+            get_run_request = service_pb2.GetPipelineVersionRunRequest()
+            get_run_request.user_app_id.CopyFrom(self.user_app_id)
+            get_run_request.pipeline_id = self.pipeline_id
+            get_run_request.pipeline_version_id = self.pipeline_version_id or ""
+            get_run_request.pipeline_version_run_id = run_id
+
+            try:
+                run_response = self.STUB.GetPipelineVersionRun(
+                    get_run_request, metadata=self.auth_helper.metadata
+                )
+
+                if run_response.status.code != status_code_pb2.StatusCode.SUCCESS:
+                    logger.error(f"Error getting run status: {run_response.status.description}")
+                    time.sleep(monitor_interval)
+                    continue
+
+                pipeline_run = run_response.pipeline_version_run
+
+                # Display new log entries
+                self._display_new_logs(run_id, seen_logs)
+
+                elapsed_time = time.time() - start_time
+                logger.info(f"Pipeline run monitoring... (elapsed {elapsed_time:.1f}s)")
+
+                # Check if we have orchestration status
+                if (
+                    hasattr(pipeline_run, 'orchestration_status')
+                    and pipeline_run.orchestration_status
+                ):
+                    orch_status = pipeline_run.orchestration_status
+                    if hasattr(orch_status, 'status') and orch_status.status:
+                        status_code = orch_status.status.code
+                        status_name = _get_status_name(status_code)
+                        logger.info(f"Pipeline run status: {status_code} ({status_name})")
+
+                        # Display orchestration status details if available
+                        if hasattr(orch_status, 'description') and orch_status.description:
+                            logger.info(f"Orchestration status: {orch_status.description}")
+
+                        # Success codes that allow continuation: JOB_RUNNING, JOB_QUEUED
+                        if status_code in [64001, 64002]:  # JOB_QUEUED, JOB_RUNNING
+                            logger.info(f"Pipeline run in progress: {status_code} ({status_name})")
+                            # Continue monitoring
+                        # Successful terminal state: JOB_COMPLETED
+                        elif status_code == 64003:  # JOB_COMPLETED
+                            logger.info("Pipeline run completed successfully!")
+                            return {"status": "success", "pipeline_version_run": pipeline_run}
+                        # Failure terminal states: JOB_UNEXPECTED_ERROR, JOB_FAILED
+                        elif status_code in [64004, 64005]:  # JOB_FAILED, JOB_UNEXPECTED_ERROR
+                            logger.error(
+                                f"Pipeline run failed with status: {status_code} ({status_name})"
+                            )
+                            return {"status": "failed", "pipeline_version_run": pipeline_run}
+                        # Handle legacy SUCCESS status for backward compatibility
+                        elif status_code == status_code_pb2.StatusCode.SUCCESS:
+                            logger.info("Pipeline run completed successfully!")
+                            return {"status": "success", "pipeline_version_run": pipeline_run}
+                        elif status_code != status_code_pb2.StatusCode.MIXED_STATUS:
+                            # Log other unexpected statuses but continue monitoring
+                            logger.warning(
+                                f"Unexpected pipeline run status: {status_code} ({status_name}). Continuing to monitor..."
+                            )
+
+            except Exception as e:
+                logger.error(f"Error monitoring pipeline run: {e}")
+
+            time.sleep(monitor_interval)
+
+        logger.error(f"Pipeline run timed out after {timeout} seconds")
+        return {"status": "timeout"}
+
+    def _display_new_logs(self, run_id: str, seen_logs: set):
+        """Display new log entries for a pipeline version run.
+
+        Args:
+            run_id (str): The pipeline version run ID.
+            seen_logs (set): Set of already seen log entry IDs.
+        """
+        try:
+            logs_request = service_pb2.ListLogEntriesRequest()
+            logs_request.user_app_id.CopyFrom(self.user_app_id)
+            logs_request.pipeline_id = self.pipeline_id
+            logs_request.pipeline_version_id = self.pipeline_version_id or ""
+            logs_request.pipeline_version_run_id = run_id
+            logs_request.log_type = "pipeline.version.run"  # Set required log type
+            logs_request.page = 1
+            logs_request.per_page = 50
+
+            logs_response = self.STUB.ListLogEntries(
+                logs_request, metadata=self.auth_helper.metadata
+            )
+
+            if logs_response.status.code == status_code_pb2.StatusCode.SUCCESS:
+                for log_entry in logs_response.log_entries:
+                    # Use log entry URL or timestamp as unique identifier
+                    log_id = log_entry.url or f"{log_entry.created_at.seconds}_{log_entry.message}"
+                    if log_id not in seen_logs:
+                        seen_logs.add(log_id)
+                        log_message = f"[LOG] {log_entry.message.strip()}"
+
+                        # Write to file if log_file is specified, otherwise log to console
+                        if self.log_file:
+                            with open(self.log_file, 'a', encoding='utf-8') as f:
+                                f.write(log_message + '\n')
+                        else:
+                            logger.info(log_message)
+
+        except Exception as e:
+            logger.debug(f"Error fetching logs: {e}")

clarifai/runners/models/model_builder.py
@@ -73,6 +73,8 @@ class ModelBuilder:
         validate_api_ids: bool = True,
         download_validation_only: bool = False,
         app_not_found_action: Literal["auto_create", "prompt", "error"] = "error",
+        pat: str = None,
+        base_url: str = None,
     ):
         """
         :param folder: The folder containing the model.py, config.yaml, requirements.txt and
@@ -83,12 +85,16 @@ class ModelBuilder:
           just downloading a checkpoint.
         :param app_not_found_action: Defines how to handle the case when the app is not found.
           Options: 'auto_create' - create automatically, 'prompt' - ask user, 'error' - raise exception.
+        :param pat: Personal access token for authentication. If None, will use environment variables.
+        :param base_url: Base URL for the API. If None, will use environment variables.
         """
         assert app_not_found_action in ["auto_create", "prompt", "error"], ValueError(
             f"Expected one of {['auto_create', 'prompt', 'error']}, got {app_not_found_action=}"
         )
         self.app_not_found_action = app_not_found_action
         self._client = None
+        self._pat = pat
+        self._base_url = base_url
         if not validate_api_ids:  # for backwards compatibility
             download_validation_only = True
         self.download_validation_only = download_validation_only
@@ -487,8 +493,20 @@ class ModelBuilder:
         user_id = model.get('user_id')
         app_id = model.get('app_id')
 
-        self._base_api = os.environ.get('CLARIFAI_API_BASE', 'https://api.clarifai.com')
-        self._client = BaseClient(user_id=user_id, app_id=app_id, base=self._base_api)
+        # Use context parameters if provided, otherwise fall back to environment variables
+        self._base_api = (
+            self._base_url
+            if self._base_url
+            else os.environ.get('CLARIFAI_API_BASE', 'https://api.clarifai.com')
+        )
+
+        # Create BaseClient with explicit pat parameter if provided
+        if self._pat:
+            self._client = BaseClient(
+                user_id=user_id, app_id=app_id, base=self._base_api, pat=self._pat
+            )
+        else:
+            self._client = BaseClient(user_id=user_id, app_id=app_id, base=self._base_api)
 
         return self._client
 
@@ -550,6 +568,11 @@ class ModelBuilder:
                 "inference_compute_info not found in the config file"
             )
         inference_compute_info = self.config.get('inference_compute_info')
+        # Ensure cpu_limit is a string if it exists and is an int
+        if 'cpu_limit' in inference_compute_info and isinstance(
+            inference_compute_info['cpu_limit'], int
+        ):
+            inference_compute_info['cpu_limit'] = str(inference_compute_info['cpu_limit'])
         return json_format.ParseDict(inference_compute_info, resources_pb2.ComputeInfo())
 
     def check_model_exists(self):

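The cpu_limit coercion guards against YAML reading a value like `cpu_limit: 2` as an int: json_format.ParseDict rejects an int where the proto declares a string field (which, as the coercion implies, ComputeInfo.cpu_limit appears to be). A sketch of the failure mode this avoids, under that assumption:

from clarifai_grpc.grpc.api import resources_pb2
from google.protobuf import json_format

info = {'cpu_limit': 2}
try:
    json_format.ParseDict(info, resources_pb2.ComputeInfo())
except json_format.ParseError as err:
    print(f"int cpu_limit rejected: {err}")

info['cpu_limit'] = str(info['cpu_limit'])  # what the builder now does automatically
print(json_format.ParseDict(info, resources_pb2.ComputeInfo()))
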
@@ -1221,15 +1244,17 @@
         return False
 
 
-def upload_model(folder, stage, skip_dockerfile):
+def upload_model(folder, stage, skip_dockerfile, pat=None, base_url=None):
     """
     Uploads a model to Clarifai.
 
     :param folder: The folder containing the model files.
     :param stage: The stage we are calling download checkpoints from. Typically this would "upload" and will download checkpoints if config.yaml checkpoints section has when set to "upload". Other options include "runtime" to be used in load_model or "upload" to be used during model upload. Set this stage to whatever you have in config.yaml to force downloading now.
     :param skip_dockerfile: If True, will not create a Dockerfile.
+    :param pat: Personal access token for authentication. If None, will use environment variables.
+    :param base_url: Base URL for the API. If None, will use environment variables.
     """
-    builder = ModelBuilder(folder, app_not_found_action="prompt")
+    builder = ModelBuilder(folder, app_not_found_action="prompt", pat=pat, base_url=base_url)
     builder.download_checkpoints(stage=stage)
     if not skip_dockerfile:
         builder.create_dockerfile()

clarifai/runners/models/model_class.py
@@ -1,6 +1,7 @@
 import inspect
 import itertools
 import os
+import threading
 import traceback
 from abc import ABC
 from collections import abc
@@ -59,15 +60,22 @@ class ModelClass(ABC):
           yield item.x + ' ' + str(item.y)
     '''
 
+    def __init__(self):
+        super().__init__()
+        self._thread_local = threading.local()
+
     @staticmethod
     def method(func):
         setattr(func, _METHOD_INFO_ATTR, _MethodInfo(func))
         return func
 
     def set_output_context(self, prompt_tokens=None, completion_tokens=None):
-        """This is used to set the prompt and completion tokens in the Output proto"""
-        self._prompt_tokens = prompt_tokens
-        self._completion_tokens = completion_tokens
+        """Set the prompt and completion tokens for the Output proto.
+        In batch mode, call this once per output, in order, before returning each output.
+        """
+        if not hasattr(self._thread_local, 'token_contexts'):
+            self._thread_local.token_contexts = []
+        self._thread_local.token_contexts.append((prompt_tokens, completion_tokens))
 
     def load_model(self):
         """Load the model."""
@@ -213,6 +221,7 @@ class ModelClass(ABC):
         self, request: service_pb2.PostModelOutputsRequest
     ) -> Iterator[service_pb2.MultiOutputResponse]:
         try:
+            assert len(request.inputs) == 1, "Generate requires exactly one input"
             method_name = 'generate'
             if len(request.inputs) > 0 and '_method_name' in request.inputs[0].data.metadata:
                 method_name = request.inputs[0].data.metadata['_method_name']
@@ -385,12 +394,18 @@ class ModelClass(ABC):
             data = DataConverter.convert_output_data_to_old_format(proto.data)
             proto.data.CopyFrom(data)
         proto.status.code = status_code_pb2.SUCCESS
-        if hasattr(self, "_prompt_tokens") and self._prompt_tokens is not None:
-            proto.prompt_tokens = self._prompt_tokens
-        if hasattr(self, "_completion_tokens") and self._completion_tokens is not None:
-            proto.completion_tokens = self._completion_tokens
-        self._prompt_tokens = None
-        self._completion_tokens = None
+        # Per-output token context support
+        token_contexts = getattr(self._thread_local, 'token_contexts', None)
+        prompt_tokens = completion_tokens = None
+        if token_contexts and len(token_contexts) > 0:
+            prompt_tokens, completion_tokens = token_contexts.pop(0)
+            # If this was the last, clean up
+            if len(token_contexts) == 0:
+                del self._thread_local.token_contexts
+        if prompt_tokens is not None:
+            proto.prompt_tokens = prompt_tokens
+        if completion_tokens is not None:
+            proto.completion_tokens = completion_tokens
         return proto
 
     @classmethod

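set_output_context now queues one (prompt_tokens, completion_tokens) pair per call in thread-local storage, and the proto-building path pops them in order, so a batch method can report usage for each output rather than only the last. A hedged sketch of how a model subclass might use it (MyModel and its token counts are illustrative, not part of this release):

from typing import List

from clarifai.runners.models.model_class import ModelClass


class MyModel(ModelClass):
    @ModelClass.method
    def predict(self, prompts: List[str]) -> List[str]:
        outputs = []
        for prompt in prompts:
            completion = prompt.upper()  # stand-in for real inference
            # Queue token counts for this output; they are popped in the
            # same order when each Output proto is built.
            self.set_output_context(
                prompt_tokens=len(prompt.split()),
                completion_tokens=len(completion.split()),
            )
            outputs.append(completion)
        return outputs
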
clarifai/runners/models/openai_class.py
@@ -34,6 +34,7 @@ class OpenAIModelClass(ModelClass):
     model = None
 
     def __init__(self) -> None:
+        super().__init__()
        if self.client is None:
            raise NotImplementedError("Subclasses must set the 'client' class attribute")
        if self.model is None:

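The added super().__init__() is what makes the thread-local token contexts work for OpenAI-style models: ModelClass.__init__ creates self._thread_local, so without the chained call, any set_output_context from an OpenAIModelClass subclass would raise AttributeError. A minimal subclass sketch (the client endpoint and model name are placeholders, assuming the standard openai package):

import openai

from clarifai.runners.models.openai_class import OpenAIModelClass


class MyOpenAIModel(OpenAIModelClass):
    # Any OpenAI-compatible endpoint works; values here are placeholders.
    client = openai.OpenAI(api_key="YOUR_KEY", base_url="http://localhost:8000/v1")
    model = "my-model"


# Instantiating now runs ModelClass.__init__ via the new super() call,
# so self._thread_local exists before set_output_context is ever used.
runner_model = MyOpenAIModel()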