clarifai-10.0.1-py3-none-any.whl → clarifai-10.1.1-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. clarifai/client/app.py +23 -43
  2. clarifai/client/base.py +46 -4
  3. clarifai/client/dataset.py +85 -33
  4. clarifai/client/input.py +35 -7
  5. clarifai/client/model.py +192 -11
  6. clarifai/client/module.py +8 -6
  7. clarifai/client/runner.py +3 -1
  8. clarifai/client/search.py +6 -3
  9. clarifai/client/user.py +14 -12
  10. clarifai/client/workflow.py +8 -5
  11. clarifai/datasets/upload/features.py +3 -0
  12. clarifai/datasets/upload/image.py +57 -26
  13. clarifai/datasets/upload/loaders/README.md +3 -4
  14. clarifai/datasets/upload/loaders/xview_detection.py +9 -5
  15. clarifai/datasets/upload/utils.py +23 -7
  16. clarifai/models/model_serving/README.md +113 -121
  17. clarifai/models/model_serving/__init__.py +2 -0
  18. clarifai/models/model_serving/cli/_utils.py +53 -0
  19. clarifai/models/model_serving/cli/base.py +14 -0
  20. clarifai/models/model_serving/cli/build.py +79 -0
  21. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  22. clarifai/models/model_serving/cli/create.py +171 -0
  23. clarifai/models/model_serving/cli/example_cli.py +34 -0
  24. clarifai/models/model_serving/cli/login.py +26 -0
  25. clarifai/models/model_serving/cli/upload.py +182 -0
  26. clarifai/models/model_serving/constants.py +20 -0
  27. clarifai/models/model_serving/docs/cli.md +150 -0
  28. clarifai/models/model_serving/docs/concepts.md +229 -0
  29. clarifai/models/model_serving/docs/dependencies.md +1 -1
  30. clarifai/models/model_serving/docs/inference_parameters.md +112 -107
  31. clarifai/models/model_serving/docs/model_types.md +16 -17
  32. clarifai/models/model_serving/model_config/__init__.py +4 -2
  33. clarifai/models/model_serving/model_config/base.py +369 -0
  34. clarifai/models/model_serving/model_config/config.py +219 -224
  35. clarifai/models/model_serving/model_config/inference_parameter.py +5 -0
  36. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -24
  37. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -18
  38. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -18
  39. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -18
  40. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -18
  41. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -18
  42. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -28
  43. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -18
  44. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -18
  45. clarifai/models/model_serving/{models → model_config}/output.py +8 -0
  46. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  47. clarifai/models/model_serving/model_config/{serializer.py → triton/serializer.py} +3 -1
  48. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  49. clarifai/models/model_serving/{models/model_types.py → model_config/triton/wrappers.py} +4 -4
  50. clarifai/models/model_serving/{models → repo_build}/__init__.py +2 -0
  51. clarifai/models/model_serving/repo_build/build.py +198 -0
  52. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  53. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  54. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  55. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  56. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  57. clarifai/models/model_serving/{models/pb_model.py → repo_build/static_files/triton/model.py} +15 -14
  58. clarifai/models/model_serving/utils.py +21 -0
  59. clarifai/rag/rag.py +67 -23
  60. clarifai/rag/utils.py +21 -5
  61. clarifai/utils/evaluation/__init__.py +427 -0
  62. clarifai/utils/evaluation/helpers.py +522 -0
  63. clarifai/utils/logging.py +7 -0
  64. clarifai/utils/model_train.py +3 -1
  65. clarifai/versions.py +1 -1
  66. {clarifai-10.0.1.dist-info → clarifai-10.1.1.dist-info}/METADATA +58 -10
  67. clarifai-10.1.1.dist-info/RECORD +115 -0
  68. clarifai-10.1.1.dist-info/entry_points.txt +2 -0
  69. clarifai/datasets/upload/loaders/coco_segmentation.py +0 -98
  70. clarifai/models/model_serving/cli/deploy_cli.py +0 -123
  71. clarifai/models/model_serving/cli/model_zip.py +0 -61
  72. clarifai/models/model_serving/cli/repository.py +0 -89
  73. clarifai/models/model_serving/docs/custom_config.md +0 -33
  74. clarifai/models/model_serving/docs/output.md +0 -28
  75. clarifai/models/model_serving/models/default_test.py +0 -281
  76. clarifai/models/model_serving/models/inference.py +0 -50
  77. clarifai/models/model_serving/models/test.py +0 -64
  78. clarifai/models/model_serving/pb_model_repository.py +0 -108
  79. clarifai-10.0.1.dist-info/RECORD +0 -103
  80. clarifai-10.0.1.dist-info/entry_points.txt +0 -4
  81. {clarifai-10.0.1.dist-info → clarifai-10.1.1.dist-info}/LICENSE +0 -0
  82. {clarifai-10.0.1.dist-info → clarifai-10.1.1.dist-info}/WHEEL +0 -0
  83. {clarifai-10.0.1.dist-info → clarifai-10.1.1.dist-info}/top_level.txt +0 -0
--- a/clarifai/datasets/upload/loaders/xview_detection.py
+++ b/clarifai/datasets/upload/loaders/xview_detection.py
@@ -6,7 +6,7 @@ from concurrent.futures import ThreadPoolExecutor
 from multiprocessing import cpu_count
 from typing import DefaultDict, Dict, List
 
-import cv2
+from PIL import Image
 from tqdm import tqdm
 
 from clarifai.datasets.upload.base import ClarifaiDataLoader
@@ -47,12 +47,15 @@ class xviewDetectionDataLoader(ClarifaiDataLoader):
 
     self.load_data()
 
+  @property
+  def task(self):
+    return "visual_detection"
+
   def compress_tiff(self, img_path: str) -> None:
     """Compress tiff image"""
     img_comp_path = os.path.join(self.img_comp_dir, os.path.basename(img_path))
-    img_arr = cv2.imread(img_path)
-    cv2.imwrite(
-        img_comp_path, img_arr, params=(cv2.IMWRITE_TIFF_COMPRESSION, 8))  # 8: Adobe Deflate
+    img_arr = Image.open(img_path)
+    img_arr.save(img_comp_path, 'TIFF', compression='tiff_deflate')
 
   def preprocess(self):
     """Compress the tiff images to comply with clarifai grpc image encoding limit(<20MB) Uses ADOBE_DEFLATE compression algorithm"""
@@ -129,7 +132,8 @@ class xviewDetectionDataLoader(ClarifaiDataLoader):
     _id = os.path.splitext(os.path.basename(self.image_paths[index]))[0]
     image_path = self.image_paths[index]
 
-    image_height, image_width = cv2.imread(image_path).shape[:2]
+    image = Image.open(image_path)
+    image_width, image_height = image.size
     annots = []
     class_names = []
     for bbox, concept in zip(self.all_data[_id]['bboxes'], self.all_data[_id]['concepts']):
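Worth noting for reviewers: Pillow and OpenCV disagree on axis order, which is why the unpacking above changes from `(height, width)` to `(width, height)`. A minimal sanity check (the file path is a placeholder):

```python
from PIL import Image
import numpy as np

img = Image.open("sample.tiff")
width, height = img.size                 # PIL reports (width, height)
arr = np.asarray(img)                    # converting to an array flips the order
assert arr.shape[:2] == (height, width)  # NumPy/OpenCV report (rows, cols)
```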
--- a/clarifai/datasets/upload/utils.py
+++ b/clarifai/datasets/upload/utils.py
@@ -53,16 +53,19 @@ class DisplayUploadStatus:
 
   def __init__(self, dataloader: ClarifaiDataLoader,
                dataset_metrics_response: Type[MultiDatasetVersionMetricsGroupResponse],
-               dataset_info_dict: Dict[str, str]) -> None:
+               dataset_info_dict: Dict[str, str],
+               pre_upload_stats: Tuple[Dict[str, int], Dict[str, int]]) -> None:
     """Initialize the class.
     Args:
       dataloader: ClarifaiDataLoader object
       dataset_metrics_response: The dataset version metrics response from the server.
       dataset_info_dict: The dataset info dictionary.
+      pre_upload_stats: The pre upload stats for the dataset.
     """
     self.dataloader = dataloader
     self.dataset_metrics_response = dataset_metrics_response
     self.dataset_info_dict = dataset_info_dict
+    self.pre_upload_stats = pre_upload_stats
 
     self.display()
@@ -71,7 +74,18 @@ class DisplayUploadStatus:
     from rich.console import Console
 
     local_inputs_count, local_annotations_dict = self.get_dataloader_stats()
-    uploaded_inputs_dict, uploaded_annotations_dict = self.get_uploaded_dataset_stats()
+    uploaded_inputs_dict, uploaded_annotations_dict = self.get_dataset_version_stats(
+        self.dataset_metrics_response)
+
+    # Subtract the pre upload stats from the uploaded stats
+    uploaded_inputs_dict = {
+        key: int(uploaded_inputs_dict[key]) - int(self.pre_upload_stats[0].get(key, 0))
+        for key in uploaded_inputs_dict
+    }
+    uploaded_annotations_dict = {
+        key: uploaded_annotations_dict[key] - self.pre_upload_stats[1].get(key, 0)
+        for key in uploaded_annotations_dict
+    }
 
     self.local_annotations_count = sum(local_annotations_dict.values())
     self.uploaded_annotations_count = sum(uploaded_annotations_dict.values())
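The intent of the new dict comprehensions is to report only what the current upload contributed: counts captured before the upload start are subtracted from the post-upload totals. A toy illustration with made-up numbers:

```python
# Hypothetical counts: 100 inputs existed before this upload, 130 after it.
pre_upload_inputs = {"total": 100}
post_upload_inputs = {"total": 130}

delta = {key: int(post_upload_inputs[key]) - int(pre_upload_inputs.get(key, 0))
         for key in post_upload_inputs}
assert delta == {"total": 30}  # only the 30 newly uploaded inputs get displayed
```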
@@ -99,9 +113,6 @@ class DisplayUploadStatus:
     """
     from clarifai.constants.dataset import DATASET_UPLOAD_TASKS
 
-    if not isinstance(self.dataloader, ClarifaiDataLoader):
-      raise UserError("Dataloader is not an instance of ClarifaiDataLoader")
-
     task = self.dataloader.task
     if task not in DATASET_UPLOAD_TASKS:
       raise UserError(
@@ -113,8 +124,13 @@ class DisplayUploadStatus:
         local_annotations_dict[key] += len(getattr(self.dataloader[i], attr))
     return local_inputs_count, local_annotations_dict
 
-  def get_uploaded_dataset_stats(self) -> Tuple[Dict[str, int], Dict[str, int]]:
+  @staticmethod
+  def get_dataset_version_stats(
+      dataset_metrics_response: Type[MultiDatasetVersionMetricsGroupResponse]
+  ) -> Tuple[Dict[str, int], Dict[str, int]]:
     """Parse the response from the server for the dataset version metrics groups.
+    Args:
+      dataset_metrics_response: The dataset version metrics response from the server.
 
     Returns:
       uploaded_inputs_dict (Dict[str, int]): The input statistics for the dataset.
@@ -123,7 +139,7 @@ class DisplayUploadStatus:
     dataset_statistics = []
     uploaded_inputs_dict = {}
     uploaded_annotations_dict = dict(concepts=0, bboxes=0, polygons=0)
-    dict_response = MessageToDict(self.dataset_metrics_response)
+    dict_response = MessageToDict(dataset_metrics_response)
 
     for data in dict_response["datasetVersionMetricsGroups"]:
       if isinstance(data["value"], str):
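Because `get_dataset_version_stats` is now a `@staticmethod` taking the response explicitly, callers can reuse it without constructing a `DisplayUploadStatus` instance, which is presumably how the `pre_upload_stats` snapshot is captured before the upload begins. A sketch of that call, assuming the metrics response has already been fetched:

```python
# Snapshot stats for an existing dataset version before uploading new inputs.
inputs_dict, annotations_dict = DisplayUploadStatus.get_dataset_version_stats(
    dataset_metrics_response)
pre_upload_stats = (inputs_dict, annotations_dict)
```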
--- a/clarifai/models/model_serving/README.md
+++ b/clarifai/models/model_serving/README.md
@@ -1,110 +1,55 @@
-## Clarifai Model Serving: Deploy Your Machine Learning Models to Clarifai.
+# Clarifai Model Serving
 
-Build and easily deploy machine learning models to Clarifai for inference using the [Nvidia Triton Inference Server](https://github.com/triton-inference-server/server).
+## Overview
 
-## QuickStart Guide: Build a deployment ready model.
+Model Serving is part of the Clarifai user journey and offers a user-friendly interface for deploying your local model into production with Clarifai, featuring:
 
-A step by step guide to building your own triton inference model and deploying it into a Clarifai app.
+* A convenient command-line interface (CLI)
+* Easy implementation and testing in Python
+* No need for MLOps expertise
 
-1. Generate a triton model repository via commandline.
-```console
-clarifai-model-upload-init --model_name <Your model name> \
-    --model_type <select model type from available ones> \
-    --repo_dir <directory in which to create your model repository> \
-    --image_shape <(H, W) dims for models with an image input type. H and W each have a max value of 1024> \
-    --max_bs <Max batch size. Default is 1.>
-```
-2. 1. Edit the `requirements.txt` file with dependencies needed to run inference on your model and the `labels.txt` (if available in dir) with the labels your model is to predict.
-   2. Add your model loading and inference code inside `inference.py` script of the generated model repository under the `setup()` and `predict()` functions respectively. Refer to The [Inference Script section]() for a description of this file.
-   3. Inference parameters (optional): you can define some inference parameters that can be adjusted on model view of Clarifai platform when making prediction. Follow [this doc](./docs/inference_parameters.md) to build the json file.
-3. Testing (Recommend) your implementation locally by running `<your_triton_folder>/1/test.py` with basic predefined tests.
-To avoid missing dependencies when deploying, recommend to use conda to create clean environment. Then install everything in `requirements.txt`. Follow instruction inside [test.py](./models/test.py) for implementing custom tests.
-  * Create conda env and install requirements:
-  ```bash
-  # create env (note: only python version 3.8 is supported currently)
-  conda create -n <your_env> python=3.8
-  # activate it
-  conda activate <your_env>
-  # install dependencies
-  pip install -r <your_triton_folder>/requirements.txt
-  ```
-  * Then run the test by using pytest:
+## Quickstart Guide
+
+A quick example of deploying a `text-to-text` model.
+
+### Initialize a Clarifai model repository
+
+Suppose your working directory name is `your_model_dir`. Then run:
 
 ```bash
-# Run the test
-pytest ./your_triton_folder/1/test.py
-# to see std output
-pytest --log-cli-level=INFO -s ./your_triton_folder/1/test.py
-```
-4. Generate a zip of your triton model for deployment via commandline.
-```console
-clarifai-triton-zip --triton_model_repository <path to triton model repository to be compressed> \
-    --zipfile_name <name of the triton model zip> (Recommended to use <model_name>_<model-type> convention for naming)
-```
-5. Upload the generated zip to a public file storage service to get a URL to the zip. This URL must be publicly accessible and downloadable as it's necessary for the last step: uploading the model to a Clarifai app.
-6. Set your Clarifai auth credentials as environment variables.
-```console
-export CLARIFAI_USER_ID=<your clarifai user_id>
-export CLARIFAI_APP_ID=<your clarifai app_id>
-export CLARIFAI_PAT=<your clarifai PAT>
-```
-7. Upload your model to Clarifai. Please ensure that your configuration field maps adhere to [this](https://github.com/Clarifai/clarifai-python-utils/blob/main/clarifai/models/model_serving/model_config/deploy.py)
-```console
-clarifai-upload-model --url <URL to your model zip. Your zip file name is expected to have "zipfile_name" format (in clarifai-triton-zip), if not you need to specify your model_id and model_type> \
-  --model_id <Your model ID on the platform> \
-  --model_type <Clarifai model types> \
-  --desc <A description of your model> \
-  --update_version <Optional. Add new version of existing model> \
-  --infer_param <Optional. Path to json file contains inference parameters>
+$ clarifai create model --type text-to-text --working-dir your_model_dir
+$ cd your_model_dir
 ```
 
-* Finally, navigate to your Clarifai app models and check that the deployed model appears. Click it on the model name to go the model versions table to track the status of the model deployment.
-
-## Triton Model Repository
-```diff
-<model_name>/
-├── config.pbtx
-├── requirements.txt
-├── labels.txt (If applicable for given model-type)
-- ├── triton_conda.yaml
-└── 1/
-    ├── __init__.py
-    ├── inference.py
-    ├── test.py
-    └── model.py
-```
+In the `your_model_dir` folder you will see the essential files for the deployment process:
 
-A generated triton model repository looks as illustrated in the directory tree above. Any additional files such as model checkpoints and folders needed at inference time must all be placed under the `1/` directory.
+```bash
+your_model_dir
+├── clarifai_config.yaml
+├── inference.py
+├── test.py
+└── requirements.txt
+```
 
-- File Descriptions
+### Implementation
 
-| Filename | Description & Use |
-| --- | --- |
-| `config.pbtxt` | Contains the triton model configuration used by the triton inference server to guide inference requests processing. |
-| `requirements.txt` | Contains dependencies needed by a user model to successfully make predictions.|
-| `labels.txt` | Contains labels listed one per line, a model is trained to predict. The order of labels should match the model predicted class indexes. |
-| `1/inference.py` | The inference script where users write their inference code. |
-| `1/model.py` | The triton python backend model file run to serve inference requests. |
-| `1/test.py` | Contains some predefined tests in order to test inference implementation and dependencies locally. |
+Write your code in the `InferenceModel` class in `inference.py`; it is the interface between your model and the Clarifai server. There are two functions you must implement:
 
-## Inference Script
+* `__init__`: load your model checkpoint once.
+* `predict`: make predictions; called every time a request arrives from the API.
 
-An `inference.py` script with template code is generated during the triton model repository generation.
-**This is the script where users write their inference code**.
-This script is composed of a single class that contains a default init method and the `get_predictions()` method whose names mustn't be changed.
+For example, a complete implementation of a Hugging Face text-generation model:
 
 ```python
-"""User model inference script."""
-
 import os
-from pathlib import Path
-
-from clarifai.models.model_serving.model_config import (ModelTypes, get_model_config)
+from typing import Dict, Union
+from clarifai.models.model_serving.model_config import *
 
-config = get_model_config("MODEL_TYPE_PLACEHOLDER") # Input your model type
+import torch
+from transformers import AutoTokenizer
+import transformers
 
-
-class InferenceModel:
+class InferenceModel(TextToText):
   """User model inference class."""
 
   def __init__(self) -> None:
@@ -112,52 +57,99 @@ class InferenceModel:
     Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
     in this method so they are loaded only once for faster inference.
     """
-    self.base_path: Path = os.path.dirname(__file__)
-    ## sample model loading code:
-    #self.checkpoint_path: Path = os.path.join(self.base_path, "your checkpoint filename/path")
-    #self.model: Callable = <load_your_model_here from checkpoint or folder>
-
-  @config.inference.wrap_func
-  def get_predictions(self, input_data: list, **kwargs) -> list:
-    """
-    Main model inference method.
+    # current directory
+    self.base_path = os.path.dirname(__file__)
+    # where you save the hf checkpoint in your working dir, i.e. `your_model_dir`
+    model_path = os.path.join(self.base_path, "checkpoint")
+    self.tokenizer = AutoTokenizer.from_pretrained(model_path)
+    self.pipeline = transformers.pipeline(
+        "text-generation",
+        model=model_path,
+        torch_dtype=torch.float16,
+        device_map="auto",
+    )
+
+  def predict(self, input_data: list,
+              inference_parameters: Dict[str, Union[str, float, int]]) -> list:
+    """ Custom prediction function for a `text-to-text` (also called `text generation`) model.
 
     Args:
-    -----
-      input_data: A list of input data item to predict on.
-        Input data can be an image or text, etc depending on the model type.
-
-      **kwargs: your inference parameters.
+      input_data (List[str]): List of text
+      inference_parameters (Dict[str, Union[str, float, int]]): your inference parameters
 
     Returns:
-    --------
-      List of one of the `clarifai.models.model_serving.models.output types` or `config.inference.return_type(your_output)`. Refer to the README/docs
+      list of TextOutput
+
     """
+    output_sequences = self.pipeline(
+        input_data,
+        eos_token_id=self.tokenizer.eos_token_id,
+        **inference_parameters)
+
+    # wrap outputs in Clarifai defined output
+    return [TextOutput(each[0]) for each in output_sequences]
+```
 
-    # Delete/Comment out line below and add your inference code
-    raise NotImplementedError()
+Update dependencies in `requirements.txt`:
+
+```
+clarifai
+torch==2.1.1
+transformers==4.36.2
+accelerate==0.26.1
 ```
 
-- `__init__()` used for one-time loading of inference time artifacts such as models, tokenizers, etc that are frequently called during inference to improve inference speed.
+### Test (optional)
 
-- `get_predictions()` takes a list of input data items whose type depends on the task the model solves, & returns list of predictions.
+> NOTE: tests also run as part of the `build` and `upload` commands.
 
-`get_predictions()` should return a list of any of the output types defined under [output](docs/output.md) and the predict function MUST be decorated with a task corresponding [@config.inference.wrap_func](docs/model_types.md). The model type decorators are responsible for passing input request batches for prediction and formatting the resultant predictions into triton inference responses.
+Test and play with your implementation by executing `test.py`.
 
-Additional methods can be added to this script's `Infer` class by the user as deemed necessary for their model inference provided they are invoked inside `get_predictions()` if used at inference time.
+Install pytest:
 
-## Next steps
+```bash
+$ pip install pytest
+```
 
-- [Model types docs](docs/model_types.md)
-- [Model Output types docs](docs/output.md)
-- [Dependencies](docs/dependencies.md)
-- [Examples](https://github.com/Clarifai/examples)
-- [Custom Configs](docs/custom_config.md/)
+Run the test:
 
-## Prerequisites
+```bash
+$ pytest test.py
+```
+
+### Build
+
+Prepare for the deployment step. Run:
+
+```bash
+$ clarifai build model
+```
 
-* For deployment to Clarifai, you need a [Clarifai account](https://clarifai.com/signup).
+You will obtain a `*.clarifai` file, which is simply a zip containing all the files necessary to run your model on the Clarifai platform.
+
+`NOTE`: you need to upload the built file to cloud storage and obtain a direct download `url` for the next step.
+
+### Deployment
+
+Log in to Clarifai:
+
+```bash
+$ clarifai login
+Get your PAT from https://clarifai.com/settings/security and pass it here: <insert your pat here>
+```
+
+Upload:
+
+```bash
+$ clarifai upload model --url <url> --user-app <your_user_id>/<your_app_id> --id <your_model_id>
+```
 
-## Testing
+## Learn More
 
-* Please see https://github.com/Clarifai/clarifai-python/blob/master/clarifai/models/model_serving/models/test.py
+* [Detailed Instructions](./docs/concepts.md)
+* [Examples](https://github.com/Clarifai/examples/tree/main/model_upload)
+* [Initialize from example](./docs/cli.md)
+* [CLI usage](./docs/cli.md)
+* [Inference parameters](./docs/inference_parameters.md)
+* [Model Types](./docs/model_types.md)
+* [Dependencies](./docs/dependencies.md)
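The README stops at upload, but the SDK side of this release (`clarifai/client/model.py`, +192 −11 in the file list above) is what you would use to call the deployed model. A hedged sketch, assuming the 10.1.x `Model` client API, with placeholder IDs and `CLARIFAI_PAT` set in the environment:

```python
from clarifai.client.model import Model

# Placeholder IDs; substitute your own user/app/model.
model = Model(user_id="<user_id>", app_id="<app_id>", model_id="<model_id>")
prediction = model.predict_by_bytes(b"Write a short poem", input_type="text")
print(prediction.outputs[0].data.text.raw)
```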
--- a/clarifai/models/model_serving/__init__.py
+++ b/clarifai/models/model_serving/__init__.py
@@ -10,3 +10,5 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from .model_config import *  # noqa
+from .repo_build import *  # noqa
--- /dev/null
+++ b/clarifai/models/model_serving/cli/_utils.py
@@ -0,0 +1,53 @@
+from __future__ import annotations  # isort: skip
+import os
+import shutil
+import subprocess
+from typing import Dict, Union
+
+from ..constants import (CLARIFAI_EXAMPLES_REPO, CLARIFAI_EXAMPLES_REPO_PATH,
+                         MODEL_UPLOAD_EXAMPLE_FOLDER)
+
+
+def download_examples_repo(forced_download: bool = False):
+
+  def _pull():
+    subprocess.run(f"git clone {CLARIFAI_EXAMPLES_REPO} {CLARIFAI_EXAMPLES_REPO_PATH}")
+
+  if not os.path.isdir(CLARIFAI_EXAMPLES_REPO_PATH):
+    print(f"Download examples to {CLARIFAI_EXAMPLES_REPO_PATH}")
+    _pull()
+  else:
+    if forced_download:
+
+      def _rm_dir_readonly(func, path, _):
+        import stat
+        os.chmod(path, stat.S_IWRITE)
+        func(path)
+
+      print("Removing old examples...")
+      shutil.rmtree(CLARIFAI_EXAMPLES_REPO_PATH, onerror=_rm_dir_readonly)
+      _pull()
+
+
+def list_model_upload_examples(
+    forced_download: bool = False) -> Dict[str, tuple[str, Union[str, None]]]:
+  download_examples_repo(forced_download)
+  model_upload_folder = MODEL_UPLOAD_EXAMPLE_FOLDER
+  model_upload_path = os.path.join(CLARIFAI_EXAMPLES_REPO_PATH, model_upload_folder)
+  examples = {}
+  for model_type_ex in os.listdir(model_upload_path):
+    _folder = os.path.join(model_upload_path, model_type_ex)
+    if os.path.isdir(_folder):
+      _walk = list(os.walk(_folder))
+      if len(_walk) > 0:
+        _, model_names, _files = _walk[0]
+        readme = [item for item in _files if "readme" in item.lower()]
+        for name in model_names:
+          examples.update({
+              f"{model_type_ex}/{name}": [
+                  os.path.join(_folder, name),
+                  os.path.join(_folder, readme[0]) or None
+              ]
+          })
+
+  return examples
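These helpers appear to back `cli/example_cli.py` (item 23 in the file list): the mapping key is `<model_type>/<example_name>`, and the value pairs a local example path with its README. A hypothetical use, assuming the module path shown in the file list:

```python
from clarifai.models.model_serving.cli._utils import list_model_upload_examples

# Clones (or reuses) the examples repo, then indexes the model-upload examples.
for key, (path, readme) in list_model_upload_examples().items():
    print(key, "->", path, readme)
```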
--- /dev/null
+++ b/clarifai/models/model_serving/cli/base.py
@@ -0,0 +1,14 @@
+from abc import ABC, abstractmethod
+from argparse import _SubParsersAction
+
+
+class BaseClarifaiCli(ABC):
+
+  @staticmethod
+  @abstractmethod
+  def register(parser: _SubParsersAction):
+    raise NotImplementedError()
+
+  @abstractmethod
+  def run(self):
+    raise NotImplementedError()
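Every new subcommand implements this two-method contract; `BuildCli` below is a real instance, but a toy subclass makes the dispatch shape explicit (the dispatcher in `clarifai_clis.py` runs `args.func(args).run()`). The `HelloCli` name and greeting are hypothetical, for illustration only:

```python
import argparse

from clarifai.models.model_serving.cli.base import BaseClarifaiCli  # path per file list


class HelloCli(BaseClarifaiCli):
  """Hypothetical subcommand illustrating the register/run contract."""

  @staticmethod
  def register(parser: argparse._SubParsersAction):
    sub = parser.add_parser("hello", help="Toy command")
    sub.set_defaults(func=HelloCli)  # main() looks up args.func and instantiates it

  def __init__(self, args: argparse.Namespace) -> None:
    self.args = args

  def run(self):
    print("hello from a clarifai CLI subcommand")
```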
--- /dev/null
+++ b/clarifai/models/model_serving/cli/build.py
@@ -0,0 +1,79 @@
+# Copyright 2023 Clarifai, Inc.
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Commandline interface for model upload utils."""
+import argparse
+import os
+import subprocess
+
+from ..repo_build import RepositoryBuilder
+from .base import BaseClarifaiCli
+
+
+class BuildCli(BaseClarifaiCli):
+
+  @staticmethod
+  def register(parser: argparse._SubParsersAction):
+    parser = parser.add_parser("build", help="Build clarifai model for uploading")
+    sub_parser = parser.add_subparsers()
+
+    BuildModelSubCli.register(sub_parser)
+
+    parser.set_defaults(func=BuildCli)
+
+
+class BuildModelSubCli(BaseClarifaiCli):
+
+  @staticmethod
+  def register(parser: argparse._SubParsersAction):
+    sub_parser = parser.add_parser("model", help="Build Clarifai model")
+    sub_parser.add_argument(
+        "path",
+        type=str,
+        nargs='?',
+        help="Path to working directory, default is current directory",
+        default=".")
+    sub_parser.add_argument(
+        "--out-path", type=str, required=False, help="Output path of built model", default=None)
+    sub_parser.add_argument(
+        "--name",
+        type=str,
+        required=False,
+        help="Name of built file, default is `clarifai_model_id` in config if set or `model`",
+        default=None)
+    sub_parser.add_argument(
+        "--no-test",
+        action="store_true",
+        help="Trigger this flag to skip testing before uploading")
+    sub_parser.set_defaults(func=BuildModelSubCli)
+
+  def __init__(self, args: argparse.Namespace) -> None:
+    self.path = args.path
+    self.no_test = args.no_test
+    self.test_path = os.path.join(self.path, "test.py")
+    self.output_path = args.out_path or self.path
+    self.serving_backend = "triton"
+    self.name = args.name
+
+  def run(self):
+
+    # Run test before uploading
+    if not self.no_test:
+      assert os.path.exists(
+          self.test_path), FileNotFoundError(f"Could not find `test.py` in {self.path}")
+      result = subprocess.run(f"pytest -s --log-level=INFO {self.test_path}")
+      assert result.returncode == 0, "Test has failed. Please make sure no error exists in your code."
+
+    # build
+    print("Start building...")
+    RepositoryBuilder.build(
+        self.path, backend=self.serving_backend, output_dir=self.output_path, name=self.name)
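The flags registered above translate into invocations like the following (directory and model names are placeholders):

```bash
# Build ./your_model_dir into dist/my_model.clarifai, skipping the pytest gate
$ clarifai build model your_model_dir --out-path dist --name my_model --no-test
```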
--- /dev/null
+++ b/clarifai/models/model_serving/cli/clarifai_clis.py
@@ -0,0 +1,33 @@
+from argparse import ArgumentParser
+
+from .build import BuildCli
+from .create import CreateCli
+from .example_cli import ExampleCli
+from .login import LoginCli
+from .upload import UploadCli
+
+
+def main():
+
+  parser = ArgumentParser("clarifai")
+  cmd_parser = parser.add_subparsers(help="Clarifai cli helpers")
+
+  UploadCli.register(cmd_parser)
+  CreateCli.register(cmd_parser)
+  LoginCli.register(cmd_parser)
+  ExampleCli.register(cmd_parser)
+  BuildCli.register(cmd_parser)
+
+  args = parser.parse_args()
+
+  if not hasattr(args, "func"):
+    parser.print_help()
+    exit(1)
+
+  # Run
+  service = args.func(args)
+  service.run()
+
+
+if __name__ == "__main__":
+  main()
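Paired with the rewritten `clarifai-10.1.1.dist-info/entry_points.txt` (item 68 in the file list), this `main()` evidently backs the single `clarifai` console command, so the quickstart from the README runs end to end as:

```bash
$ clarifai create model --type text-to-text --working-dir your_model_dir
$ clarifai build model your_model_dir
$ clarifai upload model --url <url> --user-app <your_user_id>/<your_app_id> --id <your_model_id>
```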