clarifai 11.0.7rc2__py3-none-any.whl → 11.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (109)
  1. clarifai/__init__.py +1 -1
  2. clarifai/cli/model.py +15 -42
  3. clarifai/client/input.py +21 -3
  4. clarifai/runners/__init__.py +2 -2
  5. clarifai/runners/dockerfile_template/Dockerfile.template +2 -6
  6. clarifai/runners/models/base_typed_model.py +2 -2
  7. clarifai/runners/models/{model_upload.py → model_builder.py} +98 -12
  8. clarifai/runners/models/model_run_locally.py +23 -60
  9. clarifai/runners/models/model_runner.py +6 -8
  10. clarifai/runners/models/model_servicer.py +6 -6
  11. clarifai/runners/server.py +9 -45
  12. clarifai/runners/utils/loader.py +34 -1
  13. clarifai/utils/misc.py +11 -0
  14. {clarifai-11.0.7rc2.dist-info → clarifai-11.1.0.dist-info}/METADATA +26 -14
  15. clarifai-11.1.0.dist-info/RECORD +101 -0
  16. {clarifai-11.0.7rc2.dist-info → clarifai-11.1.0.dist-info}/WHEEL +1 -1
  17. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  18. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  19. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  20. clarifai/cli/__main__.py~ +0 -4
  21. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  22. clarifai/cli/__pycache__/__main__.cpython-310.pyc +0 -0
  23. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  24. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  25. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  26. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  27. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  28. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  29. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  30. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  31. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  32. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  33. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  34. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  35. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  36. clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
  37. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  38. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  39. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  40. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  41. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  42. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  43. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  44. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  45. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  46. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  47. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  48. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  49. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  50. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  51. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  52. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  53. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  54. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  55. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  56. clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
  57. clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
  58. clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
  59. clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  60. clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
  61. clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
  62. clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
  63. clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
  64. clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
  65. clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
  66. clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
  67. clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
  68. clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
  69. clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
  70. clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
  71. clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
  72. clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
  73. clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
  74. clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
  75. clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
  76. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  77. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  78. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  79. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  80. clarifai/runners/__pycache__/server.cpython-310.pyc +0 -0
  81. clarifai/runners/dockerfile_template/Dockerfile.debug +0 -9
  82. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  83. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  84. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  85. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  86. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  87. clarifai/runners/models/__pycache__/model_servicer.cpython-310.pyc +0 -0
  88. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  89. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  90. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  91. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  92. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  93. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  94. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  95. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  96. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  97. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  98. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  99. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  100. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  101. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  102. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  103. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  104. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  105. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  106. clarifai-11.0.7rc2.dist-info/RECORD +0 -190
  107. {clarifai-11.0.7rc2.dist-info → clarifai-11.1.0.dist-info}/LICENSE +0 -0
  108. {clarifai-11.0.7rc2.dist-info → clarifai-11.1.0.dist-info}/entry_points.txt +0 -0
  109. {clarifai-11.0.7rc2.dist-info → clarifai-11.1.0.dist-info}/top_level.txt +0 -0
clarifai/__init__.py CHANGED
@@ -1 +1 @@
-__version__ = "11.0.7rc2"
+__version__ = "11.1.0"
clarifai/cli/model.py CHANGED
@@ -28,9 +28,8 @@ def model():
 )
 def upload(model_path, download_checkpoints, skip_dockerfile):
   """Upload a model to Clarifai."""
-  from clarifai.runners.models import model_upload
-
-  model_upload.main(model_path, download_checkpoints, skip_dockerfile)
+  from clarifai.runners.models.model_builder import upload_model
+  upload_model(model_path, download_checkpoints, skip_dockerfile)
 
 
 @model.command()
@@ -50,9 +49,9 @@ def upload(model_path, download_checkpoints, skip_dockerfile):
 def download_checkpoints(model_path, out_path):
   """Download checkpoints from external source to local model_path"""
 
-  from clarifai.runners.models.model_upload import ModelUploader
-  uploader = ModelUploader(model_path, download_validation_only=True)
-  uploader.download_checkpoints(out_path)
+  from clarifai.runners.models.model_builder import ModelBuilder
+  builder = ModelBuilder(model_path, download_validation_only=True)
+  builder.download_checkpoints(out_path)
 
 
 @model.command()
@@ -173,8 +172,6 @@ def run_locally(model_path, port, mode, keep_env, keep_image):
 @click.option('--file_path', required=False, help='File path of file for the model to predict')
 @click.option('--url', required=False, help='URL to the file for the model to predict')
 @click.option('--bytes', required=False, help='Bytes to the file for the model to predict')
-@click.option(
-    '--input_id', required=False, help='Existing input id in the app for the model to predict')
 @click.option('--input_type', required=False, help='Type of input')
 @click.option(
     '-cc_id',
@@ -188,36 +185,28 @@ def run_locally(model_path, port, mode, keep_env, keep_image):
     '--inference_params', required=False, default='{}', help='Inference parameters to override')
 @click.option('--output_config', required=False, default='{}', help='Output config to override')
 @click.pass_context
-def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, bytes, input_id,
-            input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params,
-            output_config):
+def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, bytes, input_type,
+            compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config):
   """Predict using the given model"""
   import json
 
-  from clarifai.client.deployment import Deployment
-  from clarifai.client.input import Input
   from clarifai.client.model import Model
-  from clarifai.client.nodepool import Nodepool
   from clarifai.utils.cli import from_yaml
   if config:
     config = from_yaml(config)
-    model_id, user_id, app_id, model_url, file_path, url, bytes, input_id, input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config = (
+    model_id, user_id, app_id, model_url, file_path, url, bytes, input_type, compute_cluster_id, nodepool_id, deployment_id, inference_params, output_config = (
        config.get(k, v)
        for k, v in [('model_id', model_id), ('user_id', user_id), ('app_id', app_id), (
            'model_url', model_url), ('file_path', file_path), ('url', url), ('bytes', bytes), (
-               'input_id',
-               input_id), ('input_type',
-                           input_type), ('compute_cluster_id',
-                                         compute_cluster_id), ('nodepool_id', nodepool_id), (
-                                             'deployment_id',
-                                             deployment_id), ('inference_params',
-                                                              inference_params), ('output_config',
-                                                                                  output_config)])
+               'input_type', input_type), ('compute_cluster_id', compute_cluster_id), (
+                   'nodepool_id',
+                   nodepool_id), ('deployment_id',
+                                  deployment_id), ('inference_params',
+                                                   inference_params), ('output_config',
+                                                                       output_config)])
   if sum([opt[1] for opt in [(model_id, 1), (user_id, 1), (app_id, 1), (model_url, 3)]
           if opt[0]]) != 3:
     raise ValueError("Either --model_id & --user_id & --app_id or --model_url must be provided.")
-  if sum([1 for opt in [file_path, url, bytes, input_id] if opt]) != 1:
-    raise ValueError("Exactly one of --file_path, --url, --bytes or --input_id must be provided.")
   if compute_cluster_id or nodepool_id or deployment_id:
     if sum([
         opt[1] for opt in [(compute_cluster_id, 0.5), (nodepool_id, 0.5), (deployment_id, 1)]
@@ -267,21 +256,5 @@ def predict(ctx, config, model_id, user_id, app_id, model_url, file_path, url, b
         nodepool_id=nodepool_id,
         deployment_id=deployment_id,
         inference_params=inference_params,
-        output_config=output_config)
-  elif input_id:
-    inputs = [Input.get_input(input_id)]
-    runner_selector = None
-    if deployment_id:
-      runner_selector = Deployment.get_runner_selector(
-          user_id=ctx.obj['user_id'], deployment_id=deployment_id)
-    elif compute_cluster_id and nodepool_id:
-      runner_selector = Nodepool.get_runner_selector(
-          user_id=ctx.obj['user_id'],
-          compute_cluster_id=compute_cluster_id,
-          nodepool_id=nodepool_id)
-    model_prediction = model.predict(
-        inputs=inputs,
-        runner_selector=runner_selector,
-        inference_params=inference_params,
-        output_config=output_config)
+        output_config=output_config)  ## TO DO: Add support for input_id
   click.echo(model_prediction)
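The `upload` command now delegates to `upload_model` in the renamed `model_builder` module (full diff below). A minimal sketch of the equivalent programmatic call; the folder path is a placeholder:

```python
from clarifai.runners.models.model_builder import upload_model

# "path/to/model_dir" is a placeholder for a local model folder containing
# config.yaml, requirements.txt and 1/model.py.
upload_model("path/to/model_dir", download_checkpoints=False, skip_dockerfile=False)
```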
clarifai/client/input.py CHANGED
@@ -22,7 +22,7 @@ from clarifai.constants.dataset import MAX_RETRIES
 from clarifai.constants.input import MAX_UPLOAD_BATCH_SIZE
 from clarifai.errors import UserError
 from clarifai.utils.logging import logger
-from clarifai.utils.misc import BackoffIterator, Chunker
+from clarifai.utils.misc import BackoffIterator, Chunker, clean_input_id
 
 
 class Inputs(Lister, BaseClient):
@@ -282,7 +282,7 @@ class Inputs(Lister, BaseClient):
     for filename in os.listdir(folder_path):
       if filename.split('.')[-1] not in ['jpg', 'jpeg', 'png', 'tiff', 'webp']:
         continue
-      input_id = filename.split('.')[0]
+      input_id = clean_input_id(filename.split('.')[0])
       image_pb = resources_pb2.Image(base64=open(os.path.join(folder_path, filename), 'rb').read())
       input_protos.append(
           Inputs._get_proto(
@@ -473,7 +473,7 @@ class Inputs(Lister, BaseClient):
     for filename in os.listdir(folder_path):
       if filename.split('.')[-1] != 'txt':
         continue
-      input_id = filename.split('.')[0]
+      input_id = clean_input_id(filename.split('.')[0])
       text_pb = resources_pb2.Text(raw=open(os.path.join(folder_path, filename), 'rb').read())
       input_protos.append(
           Inputs._get_proto(
@@ -572,6 +572,24 @@ class Inputs(Lister, BaseClient):
 
     return input_mask_proto
 
+  def get_input(self, input_id: str) -> Input:
+    """Get Input object of input with input_id provided from the app.
+
+    Args:
+        input_id (str): The input ID for the annotation to get.
+
+    Returns:
+        Input: An Input object for the specified input ID.
+
+    Example:
+        >>> from clarifai.client.input import Inputs
+        >>> input_obj = Inputs(user_id = 'user_id', app_id = 'demo_app')
+        >>> input_obj.get_input(input_id='demo')
+    """
+    request = service_pb2.GetInputRequest(user_app_id=self.user_app_id, input_id=input_id)
+    response = self._grpc_request(self.STUB.GetInput, request)
+    return response.input
+
   def upload_from_url(self,
                       input_id: str,
                       image_url: str = None,
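The new `Inputs.get_input` helper wraps the `GetInput` gRPC call and returns the input proto directly. Folder uploads also now sanitize filename-derived IDs through `clean_input_id` (added to clarifai/utils/misc.py; its body is not shown in this diff). A usage sketch for `get_input`, assuming `CLARIFAI_PAT` is set and the app contains an input with ID `'demo'` (both illustrative):

```python
from clarifai.client.input import Inputs

input_obj = Inputs(user_id='user_id', app_id='demo_app')  # placeholder IDs
input_proto = input_obj.get_input(input_id='demo')  # a resources_pb2.Input
print(input_proto.id)
```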
clarifai/runners/__init__.py CHANGED
@@ -1,11 +1,11 @@
 from .models.base_typed_model import AnyAnyModel, TextInputModel, VisualInputModel
+from .models.model_builder import ModelBuilder
 from .models.model_runner import ModelRunner
-from .models.model_upload import ModelUploader
 from .utils.data_handler import InputDataHandler, OutputDataHandler
 
 __all__ = [
     "ModelRunner",
-    "ModelUploader",
+    "ModelBuilder",
     "InputDataHandler",
     "OutputDataHandler",
     "AnyAnyModel",
clarifai/runners/dockerfile_template/Dockerfile.template CHANGED
@@ -17,7 +17,7 @@ RUN pip install --no-cache-dir -r /home/nonroot/requirements.txt && \
 FROM --platform=$TARGETPLATFORM ${DOWNLOADER_IMAGE} as downloader
 
 # make sure we have the latest clarifai package.
-RUN (pip install --upgrade --upgrade-strategy only-if-needed --no-cache-dir clarifai==11.0.7rc1 clarifai-grpc clarifai-protocol || true)
+RUN (pip install --upgrade --upgrade-strategy only-if-needed --no-cache-dir clarifai clarifai-grpc clarifai-protocol || true)
 #####
 
 
@@ -46,10 +46,6 @@ COPY --link=true --from=downloader /venv /venv
 # This creates the directory that HF downloader will populate and with nonroot:nonroot permissions up.
 COPY --chown=nonroot:nonroot downloader/unused.yaml /home/nonroot/main/1/checkpoints/.cache/unused.yaml
 
-# Set entryponit to our CLI
-# ENTRYPOINT ["python", "-m", "clarifai.cli"]
-# ENTRYPOINT ["python", "-m", "clarifai"]
-
 #####
 # Download checkpoints
 COPY --link=true config.yaml /home/nonroot/main/
@@ -66,7 +62,7 @@ COPY --link=true --from=pip_packages /venv /venv
 # for the actual model.
 # If checkpoints aren't downloaded since a checkpoints: block is not provided, then they will
 # be in the build context and copied here as well.
-COPY --link=true 1/model.py /home/nonroot/main/1/model.py
+COPY --link=true 1 /home/nonroot/main/1
 # At this point we only need these for validation in the SDK.
 COPY --link=true requirements.txt config.yaml /home/nonroot/main/
clarifai/runners/models/base_typed_model.py CHANGED
@@ -7,10 +7,10 @@ from clarifai_grpc.grpc.api.service_pb2 import PostModelOutputsRequest
 from google.protobuf import json_format
 
 from ..utils.data_handler import InputDataHandler, OutputDataHandler
-from .model_runner import ModelRunner
+from .model_class import ModelClass
 
 
-class AnyAnyModel(ModelRunner):
+class AnyAnyModel(ModelClass):
 
   def load_model(self):
     """
clarifai/runners/models/{model_upload.py → model_builder.py} RENAMED
@@ -1,3 +1,5 @@
+import importlib
+import inspect
 import os
 import re
 import sys
@@ -13,6 +15,7 @@ from rich import print
 from rich.markup import escape
 
 from clarifai.client import BaseClient
+from clarifai.runners.models.model_class import ModelClass
 from clarifai.runners.utils.const import (
     AVAILABLE_PYTHON_IMAGES, AVAILABLE_TORCH_IMAGES, CONCEPTS_REQUIRED_MODEL_TYPE,
     DEFAULT_PYTHON_VERSION, PYTHON_BUILDER_IMAGE, PYTHON_RUNTIME_IMAGE, TORCH_BASE_IMAGE)
@@ -28,7 +31,8 @@ def _clear_line(n: int = 1) -> None:
   print(LINE_UP, end=LINE_CLEAR, flush=True)
 
 
-class ModelUploader:
+class ModelBuilder:
+  DEFAULT_CHECKPOINT_SIZE = 50 * 1024**3  # 50 GiB
 
   def __init__(self, folder: str, validate_api_ids: bool = True, download_validation_only=False):
     """
@@ -52,6 +56,55 @@ class ModelUploader:
     self.inference_compute_info = self._get_inference_compute_info()
     self.is_v3 = True  # Do model build for v3
 
+  def create_model_instance(self, load_model=True):
+    """
+    Create an instance of the model class, as specified in the config file.
+    """
+    # look for default model.py file location
+    for loc in ["model.py", "1/model.py"]:
+      model_file = os.path.join(self.folder, loc)
+      if os.path.exists(model_file):
+        break
+    if not os.path.exists(model_file):
+      raise Exception("Model file not found.")
+
+    module_name = os.path.basename(model_file).replace(".py", "")
+
+    spec = importlib.util.spec_from_file_location(module_name, model_file)
+    module = importlib.util.module_from_spec(spec)
+    sys.modules[module_name] = module
+    spec.loader.exec_module(module)
+
+    # Find all classes in the model.py file that are subclasses of ModelClass
+    classes = [
+        cls for _, cls in inspect.getmembers(module, inspect.isclass)
+        if issubclass(cls, ModelClass) and cls.__module__ == module.__name__
+    ]
+    # Ensure there is exactly one subclass of BaseRunner in the model.py file
+    if len(classes) != 1:
+      # check for old inheritence structure, ModelRunner used to be a ModelClass
+      runner_classes = [
+          cls for _, cls in inspect.getmembers(module, inspect.isclass)
+          if cls.__module__ == module.__name__ and any(c.__name__ == 'ModelRunner'
+                                                       for c in cls.__bases__)
+      ]
+      if runner_classes and len(runner_classes) == 1:
+        raise Exception(
+            f'Could not determine model class.'
+            f' Models should now inherit from {ModelClass.__module__}.ModelClass, not ModelRunner.'
+            f' Please update your model "{runner_classes[0].__name__}" to inherit from ModelClass.'
+        )
+      raise Exception(
+          "Could not determine model class. There should be exactly one model inheriting from ModelClass defined in the model.py"
+      )
+    model_class = classes[0]
+
+    # initialize the model
+    model = model_class()
+    if load_model:
+      model.load_model()
+    return model
+
   def _validate_folder(self, folder):
     if folder == ".":
       folder = ""  # will getcwd() next which ends with /
@@ -102,6 +155,9 @@ class ModelUploader:
     resp = self.client.STUB.GetApp(service_pb2.GetAppRequest(user_app_id=self.client.user_app_id))
     if resp.status.code == status_code_pb2.SUCCESS:
       return True
+    logger.error(
+        f"Error checking API {self._base_api} for user app {self.client.user_app_id.user_id}/{self.client.user_app_id.app_id}. Error code: {resp.status.code}"
+    )
     return False
 
   def _validate_config_model(self):
@@ -148,6 +204,24 @@ class ModelUploader:
       )
       logger.info("Continuing without Hugging Face token")
 
+  @staticmethod
+  def _get_tar_file_content_size(tar_file_path):
+    """
+    Calculates the total size of the contents of a tar file.
+
+    Args:
+        tar_file_path (str): The path to the tar file.
+
+    Returns:
+        int: The total size of the contents in bytes.
+    """
+    total_size = 0
+    with tarfile.open(tar_file_path, 'r') as tar:
+      for member in tar:
+        if member.isfile():
+          total_size += member.size
+    return total_size
+
   @property
   def client(self):
     if self._client is None:
@@ -159,9 +233,8 @@ class ModelUploader:
     user_id = model.get('user_id')
     app_id = model.get('app_id')
 
-    base = os.environ.get('CLARIFAI_API_BASE', 'https://api.clarifai.com')
-
-    self._client = BaseClient(user_id=user_id, app_id=app_id, base=base)
+    self._base_api = os.environ.get('CLARIFAI_API_BASE', 'https://api.clarifai.com')
+    self._client = BaseClient(user_id=user_id, app_id=app_id, base=self._base_api)
 
     return self._client
 
@@ -468,6 +541,18 @@ class ModelUploader:
     file_size = os.path.getsize(self.tar_file)
     logger.info(f"Size of the tar is: {file_size} bytes")
 
+    self.storage_request_size = self._get_tar_file_content_size(file_path)
+    if not download_checkpoints and self.config.get("checkpoints"):
+      # Get the checkpoint size to add to the storage request.
+      # First check for the env variable, then try querying huggingface. If all else fails, use the default.
+      checkpoint_size = os.environ.get('CHECKPOINT_SIZE_BYTES', 0)
+      if not checkpoint_size:
+        _, repo_id, _ = self._validate_config_checkpoints()
+        checkpoint_size = HuggingFaceLoader.get_huggingface_checkpoint_total_size(repo_id)
+      if not checkpoint_size:
+        checkpoint_size = self.DEFAULT_CHECKPOINT_SIZE
+      self.storage_request_size += checkpoint_size
+
     self.maybe_create_model()
     if not self.check_model_exists():
       logger.error(f"Failed to create model: {self.model_proto.id}")
@@ -542,6 +627,7 @@ class ModelUploader:
             model_id=self.model_proto.id,
             model_version=model_version_proto,
             total_size=file_size,
+            storage_request_size=self.storage_request_size,
             is_v3=self.is_v3,
         ))
     return result
@@ -589,19 +675,19 @@ class ModelUploader:
     return False
 
 
-def main(folder, download_checkpoints, skip_dockerfile):
-  uploader = ModelUploader(folder)
+def upload_model(folder, download_checkpoints, skip_dockerfile):
+  builder = ModelBuilder(folder)
   if download_checkpoints:
-    uploader.download_checkpoints()
+    builder.download_checkpoints()
   if not skip_dockerfile:
-    uploader.create_dockerfile()
-  exists = uploader.check_model_exists()
+    builder.create_dockerfile()
+  exists = builder.check_model_exists()
   if exists:
     logger.info(
-        f"Model already exists at {uploader.model_url}, this upload will create a new version for it."
+        f"Model already exists at {builder.model_url}, this upload will create a new version for it."
    )
   else:
-    logger.info(f"New model will be created at {uploader.model_url} with it's first version.")
+    logger.info(f"New model will be created at {builder.model_url} with it's first version.")
 
   input("Press Enter to continue...")
-  uploader.upload_model_version(download_checkpoints)
+  builder.upload_model_version(download_checkpoints)
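`create_model_instance` imports the user's `model.py` and requires exactly one `ModelClass` subclass, which it constructs and (optionally) calls `load_model()` on. A minimal `1/model.py` sketch that would pass that check; the class name and echo logic are illustrative, and only the `ModelClass` base plus the `load_model`/`predict` hooks are taken from this diff:

```python
# 1/model.py — hypothetical example satisfying ModelBuilder.create_model_instance().
from clarifai_grpc.grpc.api import resources_pb2, service_pb2
from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2

from clarifai.runners.models.model_class import ModelClass


class MyModel(ModelClass):  # must be the only ModelClass subclass in this file

  def load_model(self):
    # Called once by create_model_instance(load_model=True).
    self.prefix = "echo: "

  def predict(self, request: service_pb2.PostModelOutputsRequest
             ) -> service_pb2.MultiOutputResponse:
    # Echo each text input back with a prefix; per-output SUCCESS statuses
    # match what ModelRunner checks on the response.
    outputs = []
    for inp in request.inputs:
      outputs.append(
          resources_pb2.Output(
              data=resources_pb2.Data(
                  text=resources_pb2.Text(raw=self.prefix + inp.data.text.raw)),
              status=status_pb2.Status(code=status_code_pb2.SUCCESS)))
    return service_pb2.MultiOutputResponse(outputs=outputs)
```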
clarifai/runners/models/model_run_locally.py CHANGED
@@ -1,6 +1,4 @@
 import hashlib
-import importlib.util
-import inspect
 import os
 import platform
 import shutil
@@ -14,9 +12,8 @@ import venv
 
 from clarifai_grpc.grpc.api import resources_pb2, service_pb2
 from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
-from clarifai_protocol import BaseRunner
 
-from clarifai.runners.models.model_upload import ModelUploader
+from clarifai.runners.models.model_builder import ModelBuilder
 from clarifai.runners.utils.url_fetcher import ensure_urls_downloaded
 from clarifai.utils.logging import logger
 
@@ -27,9 +24,9 @@ class ModelRunLocally:
     self.model_path = model_path
     self.requirements_file = os.path.join(self.model_path, "requirements.txt")
 
-    # ModelUploader contains multiple useful methods to interact with the model
-    self.uploader = ModelUploader(self.model_path, download_validation_only=True)
-    self.config = self.uploader.config
+    # ModelBuilder contains multiple useful methods to interact with the model
+    self.builder = ModelBuilder(self.model_path, download_validation_only=True)
+    self.config = self.builder.config
 
   def _requirements_hash(self):
     """Generate a hash of the requirements file."""
@@ -91,38 +88,10 @@ class ModelRunLocally:
       self.clean_up()
       sys.exit(1)
 
-  def _get_model_runner(self):
-    """Dynamically import the runner class from the model file."""
-
-    # import the runner class that to be implement by the user
-    runner_path = os.path.join(self.model_path, "1", "model.py")
-
-    # arbitrary name given to the module to be imported
-    module = "runner_module"
-
-    spec = importlib.util.spec_from_file_location(module, runner_path)
-    runner_module = importlib.util.module_from_spec(spec)
-    sys.modules[module] = runner_module
-    spec.loader.exec_module(runner_module)
-
-    # Find all classes in the model.py file that are subclasses of BaseRunner
-    classes = [
-        cls for _, cls in inspect.getmembers(runner_module, inspect.isclass)
-        if issubclass(cls, BaseRunner) and cls.__module__ == runner_module.__name__
-    ]
-
-    # Ensure there is exactly one subclass of BaseRunner in the model.py file
-    if len(classes) != 1:
-      raise Exception("Expected exactly one subclass of BaseRunner, found: {}".format(
-          len(classes)))
-
-    MyRunner = classes[0]
-    return MyRunner
-
   def _build_request(self):
     """Create a mock inference request for testing the model."""
 
-    model_version_proto = self.uploader.get_model_version_proto()
+    model_version_proto = self.builder.get_model_version_proto()
     model_version_proto.id = "model_version"
 
     return service_pb2.PostModelOutputsRequest(
@@ -142,8 +111,8 @@ class ModelRunLocally:
     for i in range(1):
       yield request
 
-  def _run_model_inference(self, runner):
-    """Perform inference using the runner."""
+  def _run_model_inference(self, model):
+    """Perform inference using the model."""
     request = self._build_request()
     stream_request = self._build_stream_request()
 
@@ -152,7 +121,7 @@ class ModelRunLocally:
     generate_response = None
     stream_response = None
     try:
-      predict_response = runner.predict(request)
+      predict_response = model.predict(request)
     except NotImplementedError:
       logger.info("Model does not implement predict() method.")
     except Exception as e:
@@ -172,7 +141,7 @@ class ModelRunLocally:
       logger.info(f"Model Prediction succeeded: {predict_response}")
 
     try:
-      generate_response = runner.generate(request)
+      generate_response = model.generate(request)
     except NotImplementedError:
       logger.info("Model does not implement generate() method.")
     except Exception as e:
@@ -194,7 +163,7 @@ class ModelRunLocally:
           f"Model Prediction succeeded for generate and first response: {generate_first_res}")
 
     try:
-      stream_response = runner.stream(stream_request)
+      stream_response = model.stream(stream_request)
     except NotImplementedError:
       logger.info("Model does not implement stream() method.")
     except Exception as e:
@@ -217,16 +186,10 @@ class ModelRunLocally:
 
   def _run_test(self):
     """Test the model locally by making a prediction."""
-    # construct MyRunner which will call load_model()
-    MyRunner = self._get_model_runner()
-    runner = MyRunner(
-        runner_id="n/a",
-        nodepool_id="n/a",
-        compute_cluster_id="n/a",
-        user_id="n/a",
-    )
+    # Create the model
+    model = self.builder.create_model_instance()
    # send an inference.
-    self._run_model_inference(runner)
+    self._run_model_inference(model)
 
   def test_model(self):
     """Test the model by running it locally in the virtual environment."""
@@ -274,7 +237,7 @@ class ModelRunLocally:
 
     command = [
         self.python_executable, "-m", "clarifai.runners.server", "--model_path", self.model_path,
-        "--start_dev_server", "--port",
+        "--grpc", "--port",
        str(port)
     ]
    try:
@@ -324,7 +287,9 @@ class ModelRunLocally:
     # Comment out the COPY instruction that copies the current folder
     modified_lines = []
     for line in lines:
-      if 'COPY .' in line and '/app/model_dir/main' in line:
+      if 'COPY' in line and '/home/nonroot/main' in line:
+        modified_lines.append(f'# {line}')
+      elif 'download-checkpoints' in line and '/home/nonroot/main' in line:
         modified_lines.append(f'# {line}')
       else:
         modified_lines.append(line)
@@ -375,7 +340,7 @@ class ModelRunLocally:
     if self._gpu_is_available():
       cmd.extend(["--gpus", "all"])
     # Add volume mappings
-    cmd.extend(["-v", f"{self.model_path}:/app/model_dir/main"])
+    cmd.extend(["-v", f"{self.model_path}:/home/nonroot/main"])
     # Add environment variables
     if env_vars:
       for key, value in env_vars.items():
@@ -383,9 +348,7 @@ class ModelRunLocally:
     # Add the image name
     cmd.append(image_name)
     # update the CMD to run the server
-    cmd.extend(
-        ["--model_path", "/app/model_dir/main", "--start_dev_server", "--port",
-         str(port)])
+    cmd.extend(["--model_path", "/home/nonroot/main", "--grpc", "--port", str(port)])
     # Run the container
     process = subprocess.Popen(cmd,)
     logger.info(
@@ -426,7 +389,7 @@ class ModelRunLocally:
     # update the entrypoint for testing the model
     cmd.extend(["--entrypoint", "python"])
     # Add volume mappings
-    cmd.extend(["-v", f"{self.model_path}:/app/model_dir/main"])
+    cmd.extend(["-v", f"{self.model_path}:/home/nonroot/main"])
     # Add environment variables
     if env_vars:
       for key, value in env_vars.items():
@@ -436,7 +399,7 @@ class ModelRunLocally:
     # update the CMD to test the model inside the container
     cmd.extend([
         "-c",
-        "from clarifai.runners.models.model_run_locally import ModelRunLocally; ModelRunLocally('/app/model_dir/main')._run_test()"
+        "from clarifai.runners.models.model_run_locally import ModelRunLocally; ModelRunLocally('/home/nonroot/main')._run_test()"
     ])
     # Run the container
     subprocess.check_call(cmd)
@@ -518,11 +481,11 @@ def main(model_path,
     )
     sys.exit(1)
   manager = ModelRunLocally(model_path)
-  manager.uploader.download_checkpoints()
+  manager.builder.download_checkpoints()
  if inside_container:
    if not manager.is_docker_installed():
      sys.exit(1)
-    manager.uploader.create_dockerfile()
+    manager.builder.create_dockerfile()
   image_tag = manager._docker_hash()
   image_name = f"{manager.config['model']['id']}:{image_tag}"
   container_name = manager.config['model']['id']
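`ModelRunLocally` now exposes the builder as `manager.builder` and tests the bare model instance instead of constructing a runner. A sketch of the local-test flow under the new API; the path is a placeholder:

```python
from clarifai.runners.models.model_run_locally import ModelRunLocally

manager = ModelRunLocally("path/to/model_dir")  # placeholder model folder
manager.builder.download_checkpoints()  # checkpoints now come via ModelBuilder
# _run_test() builds the model with create_model_instance() and exercises
# predict(), generate() and stream(), as shown in the diff above.
manager._run_test()
```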
clarifai/runners/models/model_runner.py CHANGED
@@ -10,16 +10,14 @@ from ..utils.url_fetcher import ensure_urls_downloaded
 from .model_class import ModelClass
 
 
-class ModelRunner(BaseRunner, ModelClass, HealthProbeRequestHandler):
+class ModelRunner(BaseRunner, HealthProbeRequestHandler):
   """
   This is a subclass of the runner class which will handle only the work items relevant to models.
-
-  It is also a subclass of ModelClass so that any subclass of ModelRunner will need to just
-  implement predict(), generate() and stream() methods and load_model() if needed.
   """
 
   def __init__(
       self,
+      model: ModelClass,
       runner_id: str,
       nodepool_id: str,
       compute_cluster_id: str,
@@ -43,7 +41,7 @@ class ModelRunner(BaseRunner, ModelClass, HealthProbeRequestHandler):
         num_parallel_polls,
         **kwargs,
     )
-    self.load_model()
+    self.model = model
 
     # After model load successfully set the health probe to ready and startup
     HealthProbeRequestHandler.is_ready = True
@@ -83,7 +81,7 @@ class ModelRunner(BaseRunner, ModelClass, HealthProbeRequestHandler):
     request = runner_item.post_model_outputs_request
     ensure_urls_downloaded(request)
 
-    resp = self.predict_wrapper(request)
+    resp = self.model.predict_wrapper(request)
     successes = [o.status.code == status_code_pb2.SUCCESS for o in resp.outputs]
     if all(successes):
       status = status_pb2.Status(
@@ -113,7 +111,7 @@ class ModelRunner(BaseRunner, ModelClass, HealthProbeRequestHandler):
     request = runner_item.post_model_outputs_request
     ensure_urls_downloaded(request)
 
-    for resp in self.generate_wrapper(request):
+    for resp in self.model.generate_wrapper(request):
       successes = []
       for output in resp.outputs:
         if not output.HasField('status') or not output.status.code:
@@ -141,7 +139,7 @@ class ModelRunner(BaseRunner, ModelClass, HealthProbeRequestHandler):
   def runner_item_stream(self, runner_item_iterator: Iterator[service_pb2.RunnerItem]
                         ) -> Iterator[service_pb2.RunnerItemOutput]:
     # Call the generate() method the underlying model implements.
-    for resp in self.stream_wrapper(pmo_iterator(runner_item_iterator)):
+    for resp in self.model.stream_wrapper(pmo_iterator(runner_item_iterator)):
       successes = []
       for output in resp.outputs:
         if not output.HasField('status') or not output.status.code:
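`ModelRunner` now takes the model as a constructor argument and delegates to its `predict_wrapper`/`generate_wrapper`/`stream_wrapper`, rather than inheriting from `ModelClass` itself. A construction sketch under that composition; the ID values are placeholders, and any parameter beyond those visible in this hunk (e.g. `user_id`, presumably forwarded via `**kwargs` as in the old local-test path) is an assumption:

```python
from clarifai.runners.models.model_builder import ModelBuilder
from clarifai.runners.models.model_runner import ModelRunner

builder = ModelBuilder("path/to/model_dir", download_validation_only=True)  # placeholder path
model = builder.create_model_instance()  # imports model.py and calls load_model()

runner = ModelRunner(
    model=model,
    runner_id="runner-id",            # placeholder
    nodepool_id="nodepool-id",        # placeholder
    compute_cluster_id="cluster-id",  # placeholder
    user_id="user-id",                # assumed kwarg, mirroring the removed local-test code
)
```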