clarifai-10.8.4-py3-none-any.whl → clarifai-10.8.6-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/client/dataset.py +9 -3
- clarifai/constants/dataset.py +1 -1
- clarifai/datasets/upload/base.py +6 -3
- clarifai/datasets/upload/features.py +10 -0
- clarifai/datasets/upload/image.py +22 -13
- clarifai/datasets/upload/multimodal.py +70 -0
- clarifai/datasets/upload/text.py +8 -5
- clarifai/runners/models/model_upload.py +67 -31
- clarifai/runners/utils/loader.py +0 -1
- clarifai/utils/misc.py +6 -0
- {clarifai-10.8.4.dist-info → clarifai-10.8.6.dist-info}/METADATA +2 -1
- {clarifai-10.8.4.dist-info → clarifai-10.8.6.dist-info}/RECORD +17 -60
- clarifai/models/model_serving/README.md +0 -158
- clarifai/models/model_serving/__init__.py +0 -14
- clarifai/models/model_serving/cli/__init__.py +0 -12
- clarifai/models/model_serving/cli/_utils.py +0 -53
- clarifai/models/model_serving/cli/base.py +0 -14
- clarifai/models/model_serving/cli/build.py +0 -79
- clarifai/models/model_serving/cli/clarifai_clis.py +0 -33
- clarifai/models/model_serving/cli/create.py +0 -171
- clarifai/models/model_serving/cli/example_cli.py +0 -34
- clarifai/models/model_serving/cli/login.py +0 -26
- clarifai/models/model_serving/cli/upload.py +0 -183
- clarifai/models/model_serving/constants.py +0 -21
- clarifai/models/model_serving/docs/cli.md +0 -161
- clarifai/models/model_serving/docs/concepts.md +0 -229
- clarifai/models/model_serving/docs/dependencies.md +0 -11
- clarifai/models/model_serving/docs/inference_parameters.md +0 -139
- clarifai/models/model_serving/docs/model_types.md +0 -19
- clarifai/models/model_serving/model_config/__init__.py +0 -16
- clarifai/models/model_serving/model_config/base.py +0 -369
- clarifai/models/model_serving/model_config/config.py +0 -312
- clarifai/models/model_serving/model_config/inference_parameter.py +0 -129
- clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +0 -25
- clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +0 -20
- clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +0 -22
- clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +0 -32
- clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +0 -19
- clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +0 -19
- clarifai/models/model_serving/model_config/output.py +0 -133
- clarifai/models/model_serving/model_config/triton/__init__.py +0 -14
- clarifai/models/model_serving/model_config/triton/serializer.py +0 -136
- clarifai/models/model_serving/model_config/triton/triton_config.py +0 -182
- clarifai/models/model_serving/model_config/triton/wrappers.py +0 -281
- clarifai/models/model_serving/repo_build/__init__.py +0 -14
- clarifai/models/model_serving/repo_build/build.py +0 -198
- clarifai/models/model_serving/repo_build/static_files/_requirements.txt +0 -2
- clarifai/models/model_serving/repo_build/static_files/base_test.py +0 -169
- clarifai/models/model_serving/repo_build/static_files/inference.py +0 -26
- clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +0 -25
- clarifai/models/model_serving/repo_build/static_files/test.py +0 -40
- clarifai/models/model_serving/repo_build/static_files/triton/model.py +0 -75
- clarifai/models/model_serving/utils.py +0 -31
- {clarifai-10.8.4.dist-info → clarifai-10.8.6.dist-info}/LICENSE +0 -0
- {clarifai-10.8.4.dist-info → clarifai-10.8.6.dist-info}/WHEEL +0 -0
- {clarifai-10.8.4.dist-info → clarifai-10.8.6.dist-info}/entry_points.txt +0 -0
- {clarifai-10.8.4.dist-info → clarifai-10.8.6.dist-info}/top_level.txt +0 -0
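The listing above removes the entire `clarifai/models/model_serving` subtree (CLI, Triton model config, and docs) and adds multimodal dataset-upload support (`clarifai/datasets/upload/multimodal.py`). If you depend on the removed `clarifai.models.model_serving` workflow, a minimal sketch of the two install options (version pins taken from this diff, standard pip flags):

```bash
# Move to the new version covered by this diff
pip install --upgrade "clarifai==10.8.6"

# Or pin the last release that still ships clarifai.models.model_serving
pip install "clarifai==10.8.4"
```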
````diff
--- a/clarifai/models/model_serving/cli/example_cli.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from argparse import Namespace, _SubParsersAction
-
-from ._utils import list_model_upload_examples
-from .base import BaseClarifaiCli
-
-
-class ExampleCli(BaseClarifaiCli):
-
-  @staticmethod
-  def register(parser: _SubParsersAction):
-    creator_parser = parser.add_parser("example", help="Download/List examples of model upload")
-    sub_creator_parser = creator_parser.add_subparsers()
-
-    SubListExampleCli.register(sub_creator_parser)
-
-    creator_parser.set_defaults(func=ExampleCli)
-
-
-class SubListExampleCli(BaseClarifaiCli):
-
-  @staticmethod
-  def register(parser: _SubParsersAction):
-    _parser = parser.add_parser("list")
-    _parser.add_argument("--force-download", action="store_true", help="Force download examples")
-    _parser.set_defaults(func=SubListExampleCli)
-
-  def __init__(self, args: Namespace) -> None:
-    self.force_download = args.force_download
-
-  def run(self):
-    _list = list_model_upload_examples(self.force_download)
-    print(f"Found {len(_list)} examples")
-    for each in _list:
-      print(f" * {each}")
````
````diff
--- a/clarifai/models/model_serving/cli/login.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import argparse
-
-from clarifai.models.model_serving.constants import CLARIFAI_PAT_PATH
-from ..utils import _persist_pat
-from .base import BaseClarifaiCli
-
-
-class LoginCli(BaseClarifaiCli):
-
-  @staticmethod
-  def register(parser: argparse._SubParsersAction):
-    upload_parser = parser.add_parser("login", help="Login to Clarifai and save PAT locally")
-    upload_parser.set_defaults(func=LoginCli)
-
-  def __init__(self, args: argparse.Namespace) -> None:
-    pass
-
-  def _parse_config(self):
-    # do something with self.config_path
-    raise NotImplementedError()
-
-  def run(self):
-    msg = "Get your PAT from https://clarifai.com/settings/security and pass it here: "
-    _pat = input(msg)
-    _persist_pat(_pat)
-    print(f"Your PAT is saved at {CLARIFAI_PAT_PATH}")
````
````diff
--- a/clarifai/models/model_serving/cli/upload.py
+++ /dev/null
@@ -1,183 +0,0 @@
-# Copyright 2023 Clarifai, Inc.
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-"""Commandline interface for model upload utils."""
-import argparse
-import os
-import subprocess
-
-from clarifai.models.model_serving.model_config import get_model_config, load_user_config
-from clarifai.models.model_serving.model_config.inference_parameter import InferParamManager
-
-from ..constants import BUILT_MODEL_EXT
-from ..utils import login
-from .base import BaseClarifaiCli
-
-
-class UploadCli(BaseClarifaiCli):
-
-  @staticmethod
-  def register(parser: argparse._SubParsersAction):
-    creator_parser = parser.add_parser("upload", help="Upload component to Clarifai platform")
-    sub_creator_parser = creator_parser.add_subparsers()
-
-    UploadModelSubCli.register(sub_creator_parser)
-
-    creator_parser.set_defaults(func=UploadCli)
-
-
-class UploadModelSubCli(BaseClarifaiCli):
-
-  @staticmethod
-  def register(parser: argparse._SubParsersAction):
-    upload_parser = parser.add_parser("model", help="Upload Clarifai model")
-    upload_parser.add_argument(
-        "path",
-        type=str,
-        nargs='?',
-        help=
-        "Path to working dir to get clarifai_config.yaml or path to yaml. Default is current directory",
-        default=".")
-    upload_parser.add_argument(
-        "--url", type=str, required=False, help="Direct download url of zip file", default=None)
-    upload_parser.add_argument(
-        "--file", type=str, required=False, help="Local built file", default=None)
-    upload_parser.add_argument("--id", type=str, required=False, help="Model ID")
-    upload_parser.add_argument(
-        "--user-app",
-        type=str,
-        required=False,
-        help="User ID and App ID separated by '/', e.g., <user_id>/<app_id>")
-    upload_parser.add_argument(
-        "--no-test",
-        action="store_true",
-        help="Trigger this flag to skip testing before uploading")
-    upload_parser.add_argument(
-        "--no-resume",
-        action="store_true",
-        help="Trigger this flag to not resume uploading local file")
-    upload_parser.add_argument(
-        "--update-version",
-        action="store_true",
-        required=False,
-        help="Update exist model with new version")
-
-    upload_parser.set_defaults(func=UploadModelSubCli)
-
-  def __init__(self, args: argparse.Namespace) -> None:
-    self.no_test = args.no_test
-    self.no_resume = args.no_resume
-
-    working_dir_or_config = args.path
-    # if input a config file, then not running test
-    if working_dir_or_config.endswith(".yaml"):
-      # to folder
-      working_dir_or_config = os.path.split(working_dir_or_config)[0]
-      config_yaml_path = working_dir_or_config
-      self.test_path = None
-      self.no_test = True
-    # if it is a directory - working dir then it must contain config and test
-    else:
-      config_yaml_path = os.path.join(working_dir_or_config, "clarifai_config.yaml")
-      self.test_path = os.path.join(working_dir_or_config, "test.py")
-
-    assert os.path.exists(config_yaml_path), FileNotFoundError(
-        f"`{config_yaml_path}` does not exist")
-    self.config = load_user_config(cfg_path=config_yaml_path)
-
-    self.file = args.file
-    self.url = args.url
-    if self.file:
-      assert not self.url, ValueError("Expected either file or url, not both.")
-      assert os.path.exists(self.file), FileNotFoundError
-    elif self.url:
-      if len(self.url.split(":")) == 1:
-        # if URL has no scheme, default to https
-        self.url = f"https://{self.url}"
-      assert self.url.startswith("http") or self.url.startswith("https") or self.url.startswith(
-          "s3"
-      ), f"Invalid URL scheme, supported schemes are 'http', 'https', or 's3'. Got {self.url}"
-      self.file = None
-    else:
-      for _fname in os.listdir(working_dir_or_config):
-        if _fname.endswith(BUILT_MODEL_EXT):
-          self.file = os.path.join(working_dir_or_config, _fname)
-          break
-      assert self.file, ValueError(
-          f"Not using url/file but also not found built file with extension {BUILT_MODEL_EXT}")
-
-    self.user_id, self.app_id = "", ""
-    user_app = args.user_app
-    self.url: str = args.url
-    self.update_version = args.update_version
-
-    clarifai_cfg = self.config.clarifai_model
-    self.url: str = args.url
-    self.id = args.id or clarifai_cfg.clarifai_model_id
-    self.type = clarifai_cfg.type
-    self.desc = clarifai_cfg.description
-    self.infer_param = clarifai_cfg.inference_parameters
-    user_app = user_app or clarifai_cfg.clarifai_user_app_id
-
-    if user_app:
-      user_app = user_app.split('/')
-      assert len(
-          user_app
-      ) == 2, f"id must be combination of user_id and app_id separated by `/`, e.g. <user_id>/<app_id>. Got {args.id}"
-      self.user_id, self.app_id = user_app
-
-    login()
-
-  def run(self):
-    from clarifai.client import App, Model
-
-    # Run test before uploading
-    if not self.no_test:
-      assert os.path.exists(self.test_path), FileNotFoundError(f"Not found {self.test_path}")
-      result = subprocess.run(f"pytest -s --log-level=INFO {self.test_path}", shell=True)
-      assert result.returncode == 0, "Test has failed. Please make sure no error exists in your code."
-
-    clarifai_key_map = get_model_config(model_type=self.type).clarifai_model.field_maps
-    # inference parameters
-    inference_parameters = None
-    if isinstance(self.infer_param, str) and os.path.isfile(self.infer_param):
-      inference_parameters = InferParamManager(json_path=self.infer_param).get_list_params()
-    inputs = clarifai_key_map.input_fields_map
-    outputs = clarifai_key_map.output_fields_map
-
-    # if updating new version of existing model
-    def update_version():
-      model = Model(model_id=self.id, app_id=self.app_id)
-      if self.url:
-        model.create_version_by_url(
-            url=self.url,
-            input_field_maps=inputs,
-            output_field_maps=outputs,
-            inference_parameter_configs=inference_parameters,
-            description=self.desc)
-      elif self.file:
-        model.create_version_by_file(
-            file_path=self.file,
-            input_field_maps=inputs,
-            output_field_maps=outputs,
-            inference_parameter_configs=inference_parameters,
-            no_resume=self.no_resume,
-            description=self.desc)
-      else:
-        raise ValueError
-
-    if self.update_version:
-      update_version()
-    else:
-      # creating new model
-      _ = App(app_id=self.app_id).create_model(self.id, model_type_id=self.type)
-      update_version()
````
````diff
--- a/clarifai/models/model_serving/constants.py
+++ /dev/null
@@ -1,21 +0,0 @@
-import os
-
-MAX_HW_DIM = 1024
-IMAGE_TENSOR_NAME = "image"
-TEXT_TENSOR_NAME = "text"
-
-BUILT_MODEL_EXT = ".clarifai"
-
-USER_CACHE_DIR = os.path.join(os.path.expanduser("~"), ".cache")
-CLARIFAI_HOME = os.path.expanduser(
-    os.getenv(
-        "CLARIFAI_HOME",
-        os.path.join(os.getenv("XDG_CACHE_HOME", USER_CACHE_DIR), "clarifai"),
-    ))
-os.makedirs(CLARIFAI_HOME, exist_ok=True)
-CLARIFAI_PAT_PATH = os.path.join(CLARIFAI_HOME, "pat")
-
-CLARIFAI_EXAMPLES_REPO = "https://github.com/Clarifai/examples.git"
-repo_name = CLARIFAI_EXAMPLES_REPO.split("/")[-1].replace(".git", "")
-CLARIFAI_EXAMPLES_REPO_PATH = os.path.join(CLARIFAI_HOME, repo_name)
-MODEL_UPLOAD_EXAMPLE_FOLDER = "model_upload"
````
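The removed `constants.py` above resolved a cache directory from `CLARIFAI_HOME` (falling back to `$XDG_CACHE_HOME/clarifai`, then `~/.cache/clarifai`) and the removed `clarifai login` command stored your PAT there; nothing in 10.8.6 reads that file. A sketch of where it lived, mirroring the deleted Python logic (assumes the default fallbacks):

```bash
# Same resolution order as the deleted constants.py:
# CLARIFAI_HOME, else $XDG_CACHE_HOME/clarifai, else ~/.cache/clarifai
pat_file="${CLARIFAI_HOME:-${XDG_CACHE_HOME:-$HOME/.cache}/clarifai}/pat"
ls -l "$pat_file"   # the PAT saved by the removed `clarifai login`, if any
```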
````diff
--- a/clarifai/models/model_serving/docs/cli.md
+++ /dev/null
@@ -1,161 +0,0 @@
-# CLI helpers
-
-Supported cli:
-
-```bash
-$ clarifai -h
-    upload    Upload component to Clarifai platform
-    create    Create component of Clarifai platform
-    login     Login to Clarifai and save PAT locally
-    example   Download/List examples of model upload
-    build     Build clarifai model for uploading
-```
-
-1. Login
-
-```bash
-$ clarifai login
-Get your PAT from https://clarifai.com/settings/security and pass it here: <your pat>
-```
-
-2. Create model repository
-
-Initialize template for specify model type in provided directory
-
-* `From scratch`:
-
-```bash
-$ clarifai create model --type <model-type> --working-dir <your_working_dir>
-```
-
-* `From example`:
-
-```bash
-$ clarifai create model --from-example --working-dir <your_working_dir>
-? Select an example:
-❯ multimodal_embedder/clip
-  text_classifier/xlm-roberta
-  text_embedder/instructor-xl
-  ...
-```
-
-Then will see below output
-
-```bash
----------------------------------------------------------------------------
-* Created repository at: ./<your_working_dir>
-<your_working_dir>
-├── clarifai_config.yaml
-├── inference.py
-├── requirements.txt
-└── test.py
-
-0 directories, 4 files
-
-* Please make sure your code is tested using `test.py` before uploading
----------------------------------------------------------------------------
-```
-
-> NOTE: if working-dir exists, need to set --overwrite flag otherwise an error arises
-
-Full arguments
-
-```bash
-$ clarifai create model -h
-  --working-dir    Path to your working dir. Create new dir if it does not exist
-  --from-example   Create repository from example
-  --example-id     Example id, run `clarifai example list` to list of examples
-  --type           Clarifai supported model types.
-  --image-shape    list of H W dims for models with an image input type. H and W each have a max value of 1024
-  --max-bs         Max batch size
-  --overwrite      Overwrite working-dir if exists
-```
-
-3. See available examples
-
-```bash
-$ clarifai example list
-Found 11 examples
- * multimodal_embedder/clip
- * text_classifier/xlm-roberta
- * text_embedder/instructor-xl
- ....
-```
-
-4. Build
-
-This step will run `test.py` in provided working dir as default before building
-
-```
-$ clarifai build model <your_working_dir> --name model-name
-$ tree <your_working_dir> -a
-<your_working_dir>
-├── .cache               # (*)
-│   ├── 1
-│   │   ├── clarifai_config.yaml
-│   │   ├── inference.py
-│   │   ├── model.py
-│   │   ├── test.py
-│   │   └── ...
-│   ├── config.pbtxt
-│   └── requirements.txt
-├── clarifai_config.yaml
-├── inference.py
-├── model-name.clarifai  # (**)
-├── requirements.txt
-├── test.py
-└── ...
-```
-
-**NOTE:**
-
-(*): Build cache, user can simply ignore it.
-
-(**): zipped of .cache
-
-Full arguments
-
-```bash
-$ clarifai build model -h
-positional arguments:
-  path               Path to working directory, default is current directory
-optional arguments:
-  --out-path         Output path of built model
-  --name             Name of built file, default is `clarifai_model_id` in config if set or `model`
-  --no-test          Trigger this flag to skip testing before uploading
-```
-
-5. Upload
-
-This step will execute test.py in the specified working directory by default before proceeding with the build. You can upload your built file directly from the working directory to the platform or upload it to cloud storage and provide the direct URL during the upload process.
-
-Use the following command to upload your built file directly to the platform. It will upload the `*.clarifai` file. *Note*: Only support file size from 5MiB to 5GiB
-
-```bash
-$ clarifai upload model <your_working_dir>
-```
-
-or upload with direct download url
-
-```bash
-$ clarifai upload model <your_working_dir> --url <your url>
-```
-
-Full arguments
-
-```bash
-$ clarifai upload model -h
-positional arguments:
-  path                 Path to working dir to get clarifai_config.yaml or path to yaml. Default is current directory
-
-optional arguments:
-  -h, --help           show this help message and exit
-  --url URL            Direct download url of zip file
-  --file FILE          Local built file
-  --id ID              Model ID
-  --user-app USER_APP  User ID and App ID separated by '/', e.g., <user_id>/<app_id>
-  --no-test            Trigger this flag to skip testing before uploading
-  --no-resume          Trigger this flag to not resume uploading local file
-  --update-version     Update exist model with new version
-
-```
````
````diff
--- a/clarifai/models/model_serving/docs/concepts.md
+++ /dev/null
@@ -1,229 +0,0 @@
-# Overview
-
-Model Serving is a straightforward interface that links user model implementations in Python with a high-performance serving framework (tritonserver). It seamlessly integrates with the Clarifai Platform, allowing users to deploy their models without any prerequisites in the serving framework.
-
-```plaintext
-
-|Model code in Python| ---> |Model Serving + Clarifai Platform| ---> |Served model|
-
-```
-
-# Understanding the concepts
-
-While functioning as an interface, it comes with certain constraints that must be adhered to throughout the process.
-
-## Model repository
-
-First of all, the model repository structure obtained by running
-
-```bash
-clarifai create model --type ... --working-dir ...
-```
-
-In your working dir:
-
-```bash
-├── inference.py
-├── clarifai_config.yaml
-├── test.py
-└── requirements.txt
-```
-Where:
-
-* [inference.py](): The crucial file where users need to implement their Python code.
-* [clarifai_config.yaml](): Contains all necessary configurations for model `test`, `build` and `upload`
-* [test.py](): Predefined test cases to evaluate `inference.py`.
-* [requirements.text](): Equivalent to a normal Python project's requirements.txt.
-
-## inference.py
-Includes the ModelInference class, inherited from one of the Clarifai Models, providing utility wrapper functions and docstring to ensure that customized models work seamlessly within the platform server. The specific Clairfai Model is determined by the --type argument provided by users in the clarifai create model command.
-
-Sample for `text-to-text` model
-
-```python
-class InferenceModel(TextToText):
-  """User model inference class."""
-
-  def __init__(self) -> None:
-    """
-    Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-    in this method so they are loaded only once for faster inference.
-    """
-    # current directory
-    self.base_path: Path = os.path.dirname(__file__)
-
-  def predict(self, input_data: list,
-              inference_parameters: Dict[str, Union[str, float, int, bool]]) -> list:
-    """ Custom prediction function for `text-to-text` (also called as `text generation`) model.
-
-    Args:
-      input_data (List[str]): List of text
-      inference_parameters (Dict[str, Union[str, float, int, bool]]): your inference parameters
-
-    Returns:
-      list of TextOutput
-
-    """
-
-    raise NotImplementedError()
-
-```
-
-Users are required to implement two functions:
-
-* `__init__`: a method to load the model, called once.
-* `predict`: a function designed to generate predictions based on the provided inputs and inference parameters. This method includes a docstring inherited from its parent, providing information on input, parameters, and output types. Refer to the docstring to confirm that the outputs of this method adhere to the correct [Clarifai Output Type](../model_config/output.py), as errors may occur otherwise.
-
-When making predictions through the Clarifai API, user inputs are transmitted to input_data as a List of strings for text input or a List of NumPy arrays for RGB image input, where each array has a shape of [W, H, 3]. Additionally, all inference parameters are conveyed through the inference_parameters argument of the predict method.
-```plaintext
-
-  list of user inputs e.g.                inference parameters e.g.
-  `text-to-text` will be                  {'top_k': 5, 'temperature': 0.7, 'do_sample': False, ...}
-  ['text', 'test text',]                          |
-          |                                       |
-          |                                       |
-          |                                       |
-          v                                       v
-def predict(self, input_data:list, inference_parameters: Dict[str, str | float | int | bool]) -> list:
-    ...
-    # Predict with input data
-    outputs = self.model(input_data, **inference_parameters)
-    # Convert to Clarifai Output Type
-    return [TextOutput(each) for each in outputs]
-                    |
-                    |
-                    |
-                    v
-  Outputs are handled by the module -> platform backend to delivery back to user
-```
-
-For testing the implementation, it's recommended to execute pytest test.py or directly call the predict method of a ModelInference instance.
-
-## clarifai_config.yaml
-
-`yaml` file for essential configs
-
-```yaml
-clarifai_model:
-  clarifai_model_id:
-  clarifai_user_app_id:
-  description:
-  inference_parameters: (*)
-  labels: (*)
-  type: (**)
-serving_backend:
-  triton: (***)
-    max_batch_size:
-    image_shape:
-```
-
-Explanation:
-
-`clarifai_model`: configs for building/testing/uploading process
-
-* `clarifai_model_id` (str, optional): Model ID on the platform.
-* `clarifai_user_app_id` (str, optional): User ID and App ID on the platform seperated by `/` for example `user_1/app_1`.
-* `description` (str, optional): Model description.
-> These 3 attributes are used to upload model. If not provided, they can be passed in *upload* command.
-
-* (*) `inference_parameters` (List[Dict], optional): inference parameters for your model prediction method. This attribute is used to *test* and *upload* if provided. Two ways to insert it:
-
-  * Manual: Follow this [doc](./inference_parameters.md)
-  * Semi Manual: in *test.py*, init BaseTest with dict of your desired parameters. Learn more about [test.py]()
-
-* (*) `labels` (list): insert manually list of concept names ***required by*** these model types **visual-classifier**, **visual-detector**, **visual-segmenter** and **text-classifier**.
-
-* (**) `type` (str): type of your model, generated when init working dir. ***MUST NOT MODIFY IT***
-
-`serving_backend`: custom config for serving
-
-* `triton`: (optional)
-
-  * `max_batch_size` (int): Maximum number of inputs will go to `predict`. The default value is 1. Since `predict` method receives a list of inputs, if your model supports batch inference, you can set it to a value greater than 1 to leverage high-performance computation on the GPU.
-
-  * `image_shape` (list): Applicable only for image input models. It is a list of the width and height of the input image. The default is [-1, -1], which means it accepts any size.
-> These 2 attributes can be set when initialize using **clarifai create model** command.
-
-## test.py
-The file is generated when initializing to test InfercenceModel in inference.py.
-
-This test offers two essential features to enhance the testing and validation process:
-
-**1. Implementation Validation**
-
-Prior to initiating the build or upload processes, users can leverage this feature to thoroughly validate their implementation. This ensures the correctness and readiness of the model for deployment.
-
-The test involves the validation of custom configuration in clarifai_config.yaml:
-
-* Confirming that labels are provided for concept-output models.
-* Verifying the format of inference_parameters.
-
-Additionally, it validates the InferenceModel implementation:
-
-* Ensuring the model is loaded correctly.
-* Testing predict with dummy inputs.
-
-**2. Inference Parameter Management**
-
-Users can conveniently add or update inference parameters directly in the clarifai_config.yaml file. Additionally, the system performs automatic validation during the inference, ensuring the accuracy and compatibility of these parameters with the model's requirements. The test ensures **you can only use defined inference parameters with appropriate value**
-
-### file structure
-
-```python
-class CustomTest(unittest.TestCase):
-
-  def setUp(self) -> None:
-    your_infer_parameter = dict()
-    self.model = BaseTest(your_infer_parameter)
-
-  def test_default_cases(self):
-    self.model.test_with_default_inputs()
-
-```
-
-Explanation:
-
-* `your_infer_parameter = dict()`: define your inference parameters as dict with key is parameter name and value is default value of it. For example, define params for hf text-generation model:
-
-```python
-your_infer_parameter = dict(top_p=0.95, temperature=1, return_text=False, prefix="test")
-```
-
-* `self.model = BaseTest(your_infer_parameter)` Loaded implemented model and convert inference parameters to *Clarifai inference parameters` format and save it in `clarifai_config.yaml`. See more [doc](./inference_parameters.md)
-
-* `def test_default_cases(self):` Test your model with dummy input. If these dummy input value fails your model, kindly remove or comment out this function
-
-Define new test:
-
-Create a function with 'test' prefix, see `pytest` document to understand how to make a test case.
-Call predict by `self.model.predict([list of input data], inference_paramters)`. For instance:
-
-* Text input:
-
-```python
-def test_text_input(self):
-  text: list = ["Tell me about Clarifai", "How deploy model to Clarifai"]
-  outputs = self.model.predict(text, temperature=0.9) # In term of inference parameters for the above example, it will PASSED
-  outputs = self.model.predict(text, top_k=10) # And this one will FAILED since `top_k` param is not defined when init self.model
-
-```
-
-* Image input:
-
-```python
-def test_image(self):
-  image = cv2.imread("path/to/image")
-  image = image[:, :, ::-1] # convert to RGB
-  out = self.model.predict([image])
-```
-
-* MultiModal input:
-
-```python
-def test_image_and_text(self):
-  image = cv2.imread("path/to/image")
-  image = image[:, :, ::-1]
-  text = "this is text"
-  input = dict(text=text, image=image)
-  out = self.model.predict([input])
-```
````
````diff
--- a/clarifai/models/model_serving/docs/dependencies.md
+++ /dev/null
@@ -1,11 +0,0 @@
-## Inference Execution Environments
-
-Each model built for inference with triton requires certain dependencies & dependency versions be installed for successful inference execution.
-An execution environment is created for each model to be deployed on Clarifai and all necessary dependencies as listed in the `requirements.txt` file are installed there.
-
-## Supported python and torch versions
-
-Currently, models must use python 3.8 (any 3.8.x). Supported torch versions are 1.13.1, 2.0.1 and 2.1.1.
-If your model depends on torch, torch must be listed in your requirements.txt file (even if it is
-already a dependency of another package). An appropriate supported torch version will be selected
-based on your requirements.txt.
````
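The removed dependencies doc above pinned Python 3.8 and torch 1.13.1/2.0.1/2.1.1, and required torch to be listed explicitly even when it is only a transitive dependency. A hypothetical `requirements.txt` satisfying those (now obsolete) constraints; the package choices are illustrative, not taken from this diff:

```bash
cat > requirements.txt <<'EOF'
torch==2.1.1          # listed explicitly and pinned to a supported version
transformers==4.35.2  # illustrative dependency that pulls in torch transitively
EOF
```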