clarifai 11.2.3__py3-none-any.whl → 11.2.3rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (169)
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
  4. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  5. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  6. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  11. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  12. clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  13. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  14. clarifai/cli/base.py +81 -228
  15. clarifai/cli/compute_cluster.py +18 -28
  16. clarifai/cli/deployment.py +42 -70
  17. clarifai/cli/model.py +39 -26
  18. clarifai/cli/nodepool.py +41 -62
  19. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
  21. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
  23. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  24. clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  25. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  26. clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
  27. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  28. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  29. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  30. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  31. clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
  32. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  33. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  34. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  35. clarifai/client/app.py +1 -1
  36. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  37. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  38. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  39. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  40. clarifai/client/auth/stub.py +5 -4
  41. clarifai/client/cli/__init__.py +0 -0
  42. clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  43. clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
  44. clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
  45. clarifai/client/cli/base_cli.py +88 -0
  46. clarifai/client/cli/model_cli.py +29 -0
  47. clarifai/client/dataset.py +4 -3
  48. clarifai/client/model.py +159 -393
  49. clarifai/client/model_client.py +502 -0
  50. clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
  51. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  52. clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
  53. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  54. clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
  55. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  56. clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
  57. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  58. clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
  59. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  60. clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
  61. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  62. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  63. clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
  64. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  65. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  66. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  67. clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
  68. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  69. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  70. clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
  71. clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
  72. clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
  73. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  74. clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
  75. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  76. clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
  77. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  78. clarifai/runners/__init__.py +2 -7
  79. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  80. clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
  81. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
  82. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +42 -0
  83. clarifai/runners/dockerfile_template/Dockerfile.nim +71 -0
  84. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  85. clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
  86. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  87. clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
  88. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  89. clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
  90. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  91. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  92. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  93. clarifai/runners/models/model_builder.py +138 -51
  94. clarifai/runners/models/model_class.py +441 -28
  95. clarifai/runners/models/model_class_refract.py +80 -0
  96. clarifai/runners/models/model_run_locally.py +25 -89
  97. clarifai/runners/models/model_runner.py +8 -0
  98. clarifai/runners/models/model_servicer.py +11 -2
  99. clarifai/runners/models/model_upload.py +607 -0
  100. clarifai/runners/models/temp.py +25 -0
  101. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  102. clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
  103. clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  104. clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
  105. clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
  106. clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
  107. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  108. clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
  109. clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
  110. clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
  111. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  112. clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
  113. clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
  114. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  115. clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
  116. clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
  117. clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
  118. clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
  119. clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
  120. clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
  121. clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
  122. clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
  123. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  124. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  125. clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
  126. clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
  127. clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
  128. clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
  129. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  130. clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
  131. clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
  132. clarifai/runners/utils/code_script.py +217 -0
  133. clarifai/runners/utils/const.py +8 -9
  134. clarifai/runners/utils/data_handler.py +271 -210
  135. clarifai/runners/utils/data_handler_refract.py +213 -0
  136. clarifai/runners/utils/data_types.py +473 -0
  137. clarifai/runners/utils/data_utils.py +165 -0
  138. clarifai/runners/utils/loader.py +6 -36
  139. clarifai/runners/utils/logger.py +0 -0
  140. clarifai/runners/utils/method_signatures.py +518 -0
  141. clarifai/runners/utils/serializers.py +222 -0
  142. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  143. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  144. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  145. clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
  146. clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
  147. clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
  148. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  149. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  150. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  151. clarifai/utils/cli.py +34 -132
  152. clarifai/utils/constants.py +0 -4
  153. clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
  154. clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
  155. clarifai/utils/logging.py +21 -64
  156. clarifai/utils/misc.py +0 -2
  157. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  158. clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
  159. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  160. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  161. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  162. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/METADATA +3 -4
  163. clarifai-11.2.3rc2.dist-info/RECORD +238 -0
  164. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/WHEEL +1 -1
  165. clarifai/utils/config.py +0 -105
  166. clarifai-11.2.3.dist-info/RECORD +0 -102
  167. {clarifai-11.2.3.dist-info/licenses → clarifai-11.2.3rc2.dist-info}/LICENSE +0 -0
  168. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/entry_points.txt +0 -0
  169. {clarifai-11.2.3.dist-info → clarifai-11.2.3rc2.dist-info}/top_level.txt +0 -0
clarifai/cli/model.py CHANGED
@@ -5,7 +5,7 @@ from clarifai.cli.base import cli
 
 @cli.group(['model'])
 def model():
-  """Manage models: upload, test, local dev, predict, etc"""
+  """Manage models: upload, test locally, run locally, predict, and more"""
 
 
 @model.command()
@@ -76,6 +76,31 @@ def download_checkpoints(model_path, out_path, stage):
     required=False,
     default=".",
 )
+@click.option(
+    '--out_path',
+    type=click.Path(exists=False),
+    required=False,
+    default=None,
+    help='Path to write the method signature defitions to. If not provided, use stdout.')
+def signatures(model_path, out_path):
+  """Generate method signatures for the model."""
+
+  from clarifai.runners.models.model_builder import ModelBuilder
+  builder = ModelBuilder(model_path, download_validation_only=True)
+  signatures = builder.method_signatures_yaml()
+  if out_path:
+    with open(out_path, 'w') as f:
+      f.write(signatures)
+  else:
+    click.echo(signatures)
+
+
+@model.command()
+@click.option(
+    '--model_path',
+    type=click.Path(exists=True),
+    required=True,
+    help='Path to the model directory.')
 @click.option(
     '--mode',
     type=click.Choice(['env', 'container'], case_sensitive=False),
@@ -96,14 +121,11 @@ def download_checkpoints(model_path, out_path, stage):
     help=
     'Keep the Docker image after testing the model locally (applicable for container mode). Defaults to False.'
 )
-@click.option(
-    '--skip_dockerfile',
-    is_flag=True,
-    help=
-    'Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile. Apply for `--mode conatainer`.',
-)
-def test_locally(model_path, keep_env=False, keep_image=False, mode='env', skip_dockerfile=False):
-  """Test model locally."""
+def test_locally(model_path, keep_env=False, keep_image=False, mode='env'):
+  """Test model locally.
+
+  MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+  """
   try:
     from clarifai.runners.models import model_run_locally
     if mode == 'env' and keep_image:
@@ -117,11 +139,7 @@ def test_locally(model_path, keep_env=False, keep_image=False, mode='env', skip_
     elif mode == "container":
       click.echo("Testing model locally inside a container...")
       model_run_locally.main(
-          model_path,
-          inside_container=True,
-          run_model_server=False,
-          keep_image=keep_image,
-          skip_dockerfile=skip_dockerfile)
+          model_path, inside_container=True, run_model_server=False, keep_image=keep_image)
     click.echo("Model tested successfully.")
   except Exception as e:
     click.echo(f"Failed to test model locally: {e}", err=True)
@@ -161,14 +179,11 @@ def test_locally(model_path, keep_env=False, keep_image=False, mode='env', skip_
     help=
     'Keep the Docker image after testing the model locally (applicable for container mode). Defaults to False.'
 )
-@click.option(
-    '--skip_dockerfile',
-    is_flag=True,
-    help=
-    'Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile. Apply for `--mode conatainer`.',
-)
-def run_locally(model_path, port, mode, keep_env, keep_image, skip_dockerfile=False):
-  """Run the model locally and start a gRPC server to serve the model."""
+def run_locally(model_path, port, mode, keep_env, keep_image):
+  """Run the model locally and start a gRPC server to serve the model.
+
+  MODEL_PATH: Path to the model directory. If not specified, the current directory is used by default.
+  """
   try:
     from clarifai.runners.models import model_run_locally
     if mode == 'env' and keep_image:
@@ -186,8 +201,7 @@ def run_locally(model_path, port, mode, keep_env, keep_image, skip_dockerfile=Fa
         inside_container=True,
         run_model_server=True,
         port=port,
-        keep_image=keep_image,
-        skip_dockerfile=skip_dockerfile)
+        keep_image=keep_image)
     click.echo(f"Model server started locally from {model_path} in {mode} mode.")
   except Exception as e:
     click.echo(f"Failed to starts model server locally: {e}", err=True)
@@ -241,8 +255,7 @@ def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, b
   import json
 
   from clarifai.client.model import Model
-  from clarifai.utils.cli import from_yaml, validate_context
-  validate_context(ctx)
+  from clarifai.utils.cli import from_yaml
   if config:
     config = from_yaml(config)
   model_id, user_id, app_id, model_url, file_path, url, bytes, input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config = (
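For reference, the `signatures` command present on the 11.2.3rc2 side of this diff is a thin wrapper around `ModelBuilder`. A minimal sketch of driving the same flow from Python; the model directory path below is an illustrative placeholder:

# Mirrors what `clarifai model signatures` does: load the builder in
# download-validation-only mode and emit the method-signature YAML.
from clarifai.runners.models.model_builder import ModelBuilder

builder = ModelBuilder("./my_model_dir", download_validation_only=True)  # placeholder path
print(builder.method_signatures_yaml())  # the CLI writes this to --out_path, or stdout by default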
clarifai/cli/nodepool.py CHANGED
@@ -1,29 +1,32 @@
 import click
-
 from clarifai.cli.base import cli
-from clarifai.utils.cli import (AliasedGroup, display_co_resources, dump_yaml, from_yaml,
-                                validate_context)
+from clarifai.client.compute_cluster import ComputeCluster
+from clarifai.utils.cli import display_co_resources, dump_yaml, from_yaml
 
 
-@cli.group(['nodepool', 'np'], cls=AliasedGroup)
+@cli.group(['nodepool', 'np'])
 def nodepool():
   """Manage Nodepools: create, delete, list"""
+  pass
 
 
-@nodepool.command(['c'])
-@click.argument('compute_cluster_id')
-@click.argument('nodepool_id')
+@nodepool.command()
+@click.option(
+    '-cc_id',
+    '--compute_cluster_id',
+    required=False,
+    help='Compute Cluster ID for the compute cluster to interact with.')
 @click.option(
     '--config',
     type=click.Path(exists=True),
     required=True,
     help='Path to the nodepool config file.')
+@click.option(
+    '-np_id', '--nodepool_id', required=False, help='New Nodepool ID for the nodepool to create.')
 @click.pass_context
-def create(ctx, compute_cluster_id, nodepool_id, config):
+def create(ctx, compute_cluster_id, config, nodepool_id):
   """Create a new Nodepool with the given config file."""
-  from clarifai.client.compute_cluster import ComputeCluster
 
-  validate_context(ctx)
   nodepool_config = from_yaml(config)
   if not compute_cluster_id:
     if 'compute_cluster' not in nodepool_config['nodepool']:
@@ -39,74 +42,50 @@ def create(ctx, compute_cluster_id, nodepool_id, config):
 
   compute_cluster = ComputeCluster(
       compute_cluster_id=compute_cluster_id,
-      user_id=ctx.obj.current.user_id,
-      pat=ctx.obj.current.pat,
-      base_url=ctx.obj.current.api_base)
+      user_id=ctx.obj['user_id'],
+      pat=ctx.obj['pat'],
+      base_url=ctx.obj['base_url'])
   if nodepool_id:
     compute_cluster.create_nodepool(config, nodepool_id=nodepool_id)
   else:
     compute_cluster.create_nodepool(config)
 
 
-@nodepool.command(['ls'])
-@click.argument('compute_cluster_id', default="")
+@nodepool.command()
+@click.option(
+    '-cc_id',
+    '--compute_cluster_id',
+    required=True,
+    help='Compute Cluster ID for the compute cluster to interact with.')
 @click.option('--page_no', required=False, help='Page number to list.', default=1)
-@click.option('--per_page', required=False, help='Number of items per page.', default=128)
+@click.option('--per_page', required=False, help='Number of items per page.', default=16)
 @click.pass_context
 def list(ctx, compute_cluster_id, page_no, per_page):
-  """List all nodepools for the user across all compute clusters. If compute_cluster_id is provided
-  it will list only within that compute cluster. """
-  from clarifai.client.compute_cluster import ComputeCluster
-  from clarifai.client.user import User
-
-  validate_context(ctx)
-
-  cc_id = compute_cluster_id
+  """List all nodepools for the user."""
 
-  if cc_id:
-    compute_cluster = ComputeCluster(
-        compute_cluster_id=cc_id,
-        user_id=ctx.obj.current.user_id,
-        pat=ctx.obj.current.pat,
-        base_url=ctx.obj.current.api_base)
-    response = compute_cluster.list_nodepools(page_no, per_page)
-  else:
-    user = User(
-        user_id=ctx.obj.current.user_id,
-        pat=ctx.obj.current.pat,
-        base_url=ctx.obj.current.api_base)
-    ccs = user.list_compute_clusters(page_no, per_page)
-    response = []
-    for cc in ccs:
-      compute_cluster = ComputeCluster(
-          compute_cluster_id=cc.id,
-          user_id=ctx.obj.current.user_id,
-          pat=ctx.obj.current.pat,
-          base_url=ctx.obj.current.api_base)
-      response.extend([i for i in compute_cluster.list_nodepools(page_no, per_page)])
-
-  display_co_resources(
-      response,
-      custom_columns={
-          'ID': lambda c: c.id,
-          'USER_ID': lambda c: c.compute_cluster.user_id,
-          'COMPUTE_CLUSTER_ID': lambda c: c.compute_cluster.id,
-          'DESCRIPTION': lambda c: c.description,
-      })
+  compute_cluster = ComputeCluster(
+      compute_cluster_id=compute_cluster_id,
+      user_id=ctx.obj['user_id'],
+      pat=ctx.obj['pat'],
+      base_url=ctx.obj['base_url'])
+  response = compute_cluster.list_nodepools(page_no, per_page)
+  display_co_resources(response, "Nodepool")
 
 
-@nodepool.command(['rm'])
-@click.argument('compute_cluster_id')
-@click.argument('nodepool_id')
+@nodepool.command()
+@click.option(
+    '-cc_id',
+    '--compute_cluster_id',
+    required=True,
+    help='Compute Cluster ID for the compute cluster to interact with.')
+@click.option('-np_id', '--nodepool_id', help='Nodepool ID of the user to delete.')
 @click.pass_context
 def delete(ctx, compute_cluster_id, nodepool_id):
   """Deletes a nodepool for the user."""
-  from clarifai.client.compute_cluster import ComputeCluster
 
-  validate_context(ctx)
   compute_cluster = ComputeCluster(
       compute_cluster_id=compute_cluster_id,
-      user_id=ctx.obj.current.user_id,
-      pat=ctx.obj.current.pat,
-      base_url=ctx.obj.current.api_base)
+      user_id=ctx.obj['user_id'],
+      pat=ctx.obj['pat'],
+      base_url=ctx.obj['base_url'])
  compute_cluster.delete_nodepools([nodepool_id])
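On the 11.2.3rc2 side these nodepool commands take `-cc_id/--compute_cluster_id` options and read credentials from a plain `ctx.obj` dict. A minimal sketch of the client calls the commands reduce to; the IDs, PAT, and config path are placeholders:

# Equivalent of `nodepool create` / `list` / `delete` on the rc2 side,
# driven directly through the Python client the CLI uses.
from clarifai.client.compute_cluster import ComputeCluster

cc = ComputeCluster(
    compute_cluster_id="my-compute-cluster",  # placeholder cluster ID
    user_id="my-user-id",                     # placeholder user ID
    pat="MY_PAT",                             # placeholder personal access token
    base_url="https://api.clarifai.com")
cc.create_nodepool("nodepool_config.yaml", nodepool_id="my-nodepool")  # illustrative config path
for nodepool in cc.list_nodepools(1, 16):  # (page_no, per_page), matching the CLI defaults
  print(nodepool.id)
cc.delete_nodepools(["my-nodepool"])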
clarifai/client/app.py CHANGED
@@ -629,7 +629,7 @@ class App(Lister, BaseClient):
 
     Args:
         model_id (str): The model ID for the model to interact with.
-        model_version (Dict): The model version ID for the model version to interact with.
+        model_version_id (str): The model version ID for the model version to interact with.
 
     Returns:
         Model: A Model object for the existing model ID.
clarifai/client/auth/stub.py CHANGED
@@ -1,4 +1,5 @@
 import itertools
+import logging
 import time
 from concurrent.futures import ThreadPoolExecutor
 
@@ -7,7 +8,7 @@ from clarifai_grpc.grpc.api.status import status_code_pb2
 
 from clarifai.client.auth.helper import ClarifaiAuthHelper
 from clarifai.client.auth.register import RpcCallable, V2Stub
-from clarifai.utils.logging import logger
+
 throttle_status_codes = {
     status_code_pb2.CONN_THROTTLED,
     status_code_pb2.CONN_EXCEED_HOURLY_LIMIT,
@@ -25,7 +26,7 @@ def validate_response(response, attempt, max_attempts):
   def handle_simple_response(response):
     if hasattr(response, 'status') and hasattr(response.status, 'code'):
       if (response.status.code in throttle_status_codes) and attempt < max_attempts:
-        logger.debug('Retrying with status %s' % str(response.status))
+        logging.debug('Retrying with status %s' % str(response.status))
        return None  # Indicates a retry is needed
       else:
         return response
@@ -41,7 +42,7 @@ def validate_response(response, attempt, max_attempts):
        return itertools.chain([validated_response], response)
      return None  # Indicates a retry is needed
    except grpc.RpcError as e:
-      logger.error('Error processing streaming response: %s' % str(e))
+      logging.error('Error processing streaming response: %s' % str(e))
      return None  # Indicates an error
  else:
    # Handle simple response validation
@@ -142,7 +143,7 @@ class _RetryRpcCallable(RpcCallable):
        return v
      except grpc.RpcError as e:
        if (e.code() in retry_codes_grpc) and attempt < self.max_attempts:
-          logger.debug('Retrying with status %s' % e.code())
+          logging.debug('Retrying with status %s' % e.code())
        else:
          raise
 
clarifai/client/cli/__init__.py ADDED
File without changes
clarifai/client/cli/base_cli.py ADDED
@@ -0,0 +1,88 @@
+import click
+import os
+import yaml
+
+@click.group()
+@click.pass_context
+def cli(ctx):
+  """Clarifai CLI"""
+  ctx.ensure_object(dict)
+  config_path = 'config.yaml'
+  if os.path.exists(config_path):
+    ctx.obj = _from_yaml(config_path)
+    print("Loaded config from file.")
+    print(f"Config: {ctx.obj}")
+  else:
+    ctx.obj = {}
+
+def _from_yaml(filename: str):
+  try:
+    with open(filename, 'r') as f:
+      return yaml.safe_load(f)
+  except yaml.YAMLError as e:
+    click.echo(f"Error reading YAML file: {e}", err=True)
+    return {}
+
+def _dump_yaml(data, filename: str):
+  try:
+    with open(filename, 'w') as f:
+      yaml.dump(data, f)
+  except Exception as e:
+    click.echo(f"Error writing YAML file: {e}", err=True)
+
+def _set_base_url(env):
+  environments = {'prod': 'https://api.clarifai.com', 'staging': 'https://api-staging.clarifai.com', 'dev': 'https://api-dev.clarifai.com'}
+  return environments.get(env, 'https://api.clarifai.com')
+
+
+@cli.command()
+@click.option('--config', type=click.Path(), required=False, help='Path to the config file')
+@click.option('-e', '--env', required=False, help='Environment', type=click.Choice(['prod', 'staging', 'dev']))
+@click.option('--user_id', required=False, help='User ID')
+@click.pass_context
+def login(ctx, config, env, user_id):
+  """Login command to set PAT and other configurations."""
+
+  if config and os.path.exists(config):
+    ctx.obj = _from_yaml(config)
+
+  if 'pat' in ctx.obj:
+    os.environ["CLARIFAI_PAT"] = ctx.obj['pat']
+    click.echo("Loaded PAT from config file.")
+  elif 'CLARIFAI_PAT' in os.environ:
+    ctx.obj['pat'] = os.environ["CLARIFAI_PAT"]
+    click.echo("Loaded PAT from environment variable.")
+  else:
+    _pat = click.prompt("Get your PAT from https://clarifai.com/settings/security and pass it here", type=str)
+    os.environ["CLARIFAI_PAT"] = _pat
+    ctx.obj['pat'] = _pat
+    click.echo("PAT saved successfully.")
+
+  if user_id:
+    ctx.obj['user_id'] = user_id
+    os.environ["CLARIFAI_USER_ID"] = ctx.obj['user_id']
+  elif 'user_id' in ctx.obj or 'CLARIFAI_USER_ID' in os.environ:
+    ctx.obj['user_id'] = ctx.obj.get('user_id', os.environ["CLARIFAI_USER_ID"])
+    os.environ["CLARIFAI_USER_ID"] = ctx.obj['user_id']
+
+  if env:
+    ctx.obj['env'] = env
+    ctx.obj['base_url'] = _set_base_url(env)
+    os.environ["CLARIFAI_API_BASE"] = ctx.obj['base_url']
+  elif 'env' in ctx.obj:
+    ctx.obj['env'] = ctx.obj.get('env', "prod")
+    ctx.obj['base_url'] = _set_base_url(ctx.obj['env'])
+    os.environ["CLARIFAI_API_BASE"] = ctx.obj['base_url']
+  elif 'CLARIFAI_API_BASE' in os.environ:
+    ctx.obj['base_url'] = os.environ["CLARIFAI_API_BASE"]
+
+  _dump_yaml(ctx.obj, 'config.yaml')
+
+  click.echo("Login successful.")
+
+# Import the model CLI commands to register them
+from clarifai.client.cli.model_cli import model  # Ensure this is the correct import path
+
+
+if __name__ == '__main__':
+  cli()
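Since this `base_cli.py` keeps the session in `ctx.obj` and persists it to `config.yaml`, one way to exercise the rc2 `login` command without the installed console script is click's in-process test runner. A sketch under the assumption that `CLARIFAI_PAT` is not already set in the environment; all values are placeholders:

# Invoke the rc2 CLI in-process; the PAT prompt is answered via `input`.
from click.testing import CliRunner

from clarifai.client.cli.base_cli import cli

runner = CliRunner()
result = runner.invoke(cli, ["login", "--env", "prod", "--user_id", "my-user-id"],
                       input="MY_PAT\n")  # placeholder PAT fed to the prompt
print(result.output)  # on success prints "Login successful." and writes config.yaml in the CWD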
clarifai/client/cli/model_cli.py ADDED
@@ -0,0 +1,29 @@
+import click
+from clarifai.client.cli.base_cli import cli
+
+@cli.group()
+def model():
+  """Manage models: upload, test locally"""
+  pass
+
+@model.command()
+@click.argument('model_path', type=click.Path(exists=True))
+@click.option('--download_checkpoints', is_flag=True, help='Flag to download checkpoints before uploading and including them in the tar file that is uploaded. Defaults to False, which will attempt to download them at docker build time.', )
+@click.option('--skip_dockerfile', is_flag =True, help='Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile.', )
+def upload(model_path, download_checkpoints, skip_dockerfile):
+  """Upload a model to Clarifai."""
+  from clarifai.runners.models import model_upload
+
+  model_upload.main(model_path, download_checkpoints, skip_dockerfile)
+
+@model.command()
+@click.argument('model_path', type=click.Path(exists=True))
+def test_locally(model_path):
+  """Test model locally."""
+  try:
+    from clarifai.runners.models import run_test_locally
+    run_test_locally.main(model_path)
+    click.echo(f"Model tested locally from {model_path}.")
+  except Exception as e:
+    click.echo(f"Failed to test model locally: {e}", err=True)
+
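The rc2 `model upload` command above is a thin pass-through to `model_upload.main`. A minimal sketch of the equivalent direct call, passing arguments positionally as the command does; the model directory is a placeholder:

# Same call the `upload` command makes: (model_path, download_checkpoints, skip_dockerfile).
from clarifai.runners.models import model_upload

model_upload.main("./my_model_dir", False, False)  # placeholder path; both flags left off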
clarifai/client/dataset.py CHANGED
@@ -1,3 +1,4 @@
+import logging
 import os
 import time
 import uuid
@@ -353,7 +354,7 @@ class Dataset(Lister, BaseClient):
          break
      if failed_input_ids:
        retry_input_ids = [dataset_obj.all_input_ids[id] for id in failed_input_ids]
-        logger.warning(
+        logging.warning(
            f"Retrying upload for {len(failed_input_ids)} inputs in current batch: {retry_input_ids}\n"
        )
        failed_retrying_inputs, _, retry_response = self._upload_inputs_annotations(
@@ -493,7 +494,7 @@ class Dataset(Lister, BaseClient):
    add_file_handler(self.logger, f"Dataset_Upload{str(int(datetime.now().timestamp()))}.log")
 
    if retry_duplicates and duplicate_input_ids:
-      logger.warning(f"Retrying upload for {len(duplicate_input_ids)} duplicate inputs...\n")
+      logging.warning(f"Retrying upload for {len(duplicate_input_ids)} duplicate inputs...\n")
      duplicate_inputs_indexes = [input["Index"] for input in duplicate_input_ids]
      self.upload_dataset(
          dataloader=dataloader,
@@ -504,7 +505,7 @@ class Dataset(Lister, BaseClient):
 
    if failed_input_ids:
      #failed_inputs= ([input["Input_ID"] for input in failed_input_ids])
-      logger.warning(f"Retrying upload for {len(failed_input_ids)} failed inputs...\n")
+      logging.warning(f"Retrying upload for {len(failed_input_ids)} failed inputs...\n")
      failed_input_indexes = [input["Index"] for input in failed_input_ids]
      self.upload_dataset(
          dataloader=dataloader, log_retry_ids=failed_input_indexes, is_log_retry=True, **kwargs)