clarifai 11.1.2__py3-none-any.whl → 11.1.4rc1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/cli/__main__.py~ +4 -0
- clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/__main__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/cli/model.py +13 -10
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
- clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
- clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
- clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
- clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/server.cpython-310.pyc +0 -0
- clarifai/runners/dockerfile_template/Dockerfile.debug +11 -0
- clarifai/runners/dockerfile_template/Dockerfile.debug~ +9 -0
- clarifai/runners/dockerfile_template/Dockerfile.template +2 -2
- clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_builder.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_servicer.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
- clarifai/runners/models/model_builder.py +84 -54
- clarifai/runners/models/model_run_locally.py +3 -2
- clarifai/runners/models/model_upload.py +607 -0
- clarifai/runners/utils/#const.py# +30 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
- clarifai/runners/utils/const.py +3 -0
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- {clarifai-11.1.2.dist-info → clarifai-11.1.4rc1.dist-info}/METADATA +15 -26
- clarifai-11.1.4rc1.dist-info/RECORD +194 -0
- {clarifai-11.1.2.dist-info → clarifai-11.1.4rc1.dist-info}/WHEEL +1 -1
- clarifai-11.1.2.dist-info/RECORD +0 -101
- {clarifai-11.1.2.dist-info → clarifai-11.1.4rc1.dist-info}/LICENSE +0 -0
- {clarifai-11.1.2.dist-info → clarifai-11.1.4rc1.dist-info}/entry_points.txt +0 -0
- {clarifai-11.1.2.dist-info → clarifai-11.1.4rc1.dist-info}/top_level.txt +0 -0
clarifai/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "11.1.2"
+__version__ = "11.1.4rc1"
Binary files changed (compiled .pyc caches listed above); no textual diff.
clarifai/cli/model.py
CHANGED
@@ -14,22 +14,16 @@ def model():
     type=click.Path(exists=True),
     required=True,
     help='Path to the model directory.')
-@click.option(
-    '--download_checkpoints',
-    is_flag=True,
-    help=
-    'Flag to download checkpoints before uploading and including them in the tar file that is uploaded. Defaults to False, which will attempt to download them at docker build time.',
-)
 @click.option(
     '--skip_dockerfile',
     is_flag=True,
     help=
     'Flag to skip generating a dockerfile so that you can manually edit an already created dockerfile.',
 )
-def upload(model_path, download_checkpoints, skip_dockerfile):
+def upload(model_path, skip_dockerfile):
   """Upload a model to Clarifai."""
   from clarifai.runners.models.model_builder import upload_model
-  upload_model(model_path, download_checkpoints, skip_dockerfile)
+  upload_model(model_path, "upload", skip_dockerfile)


 @model.command()
@@ -46,12 +40,21 @@ def upload(model_path, download_checkpoints, skip_dockerfile):
     help=
     'Option path to write the checkpoints to. This will place them in {out_path}/ If not provided it will default to {model_path}/1/checkpoints where the config.yaml is read..'
 )
-def download_checkpoints(model_path, out_path):
+@click.option(
+    '--stage',
+    type=str,
+    required=False,
+    default="build",
+    show_default=True,
+    help=
+    'The stage we are calling download checkpoints from. Typically this would be in the build stage which is the default. Other options include "runtime" to be used in load_model, "upload" to be used during model upload or "any" which will force download now regardless of config.yaml'
+)
+def download_checkpoints(model_path, out_path, stage):
   """Download checkpoints from external source to local model_path"""

   from clarifai.runners.models.model_builder import ModelBuilder
   builder = ModelBuilder(model_path, download_validation_only=True)
-  builder.download_checkpoints(out_path)
+  builder.download_checkpoints(stage=stage, checkpoint_path_override=out_path)


 @model.command()
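The net effect of this change: the --download_checkpoints flag is gone, `upload` always runs as the "upload" stage, and `download-checkpoints` gains a --stage option. A hedged sketch of the equivalent Python calls (the ./my_model path is a placeholder for a model directory containing config.yaml and requirements.txt):

```python
# Sketch of the programmatic equivalents of the CLI commands above.
# "./my_model" is a placeholder model directory.
from clarifai.runners.models.model_builder import ModelBuilder, upload_model

# `clarifai model upload` now always passes stage="upload"; checkpoints are only
# downloaded (and bundled into the uploaded tar) if config.yaml sets checkpoints.when: upload.
upload_model("./my_model", "upload", skip_dockerfile=False)

# `clarifai model download-checkpoints --stage any` forces a download right now,
# regardless of the "when" value in config.yaml.
builder = ModelBuilder("./my_model", download_validation_only=True)
builder.download_checkpoints(stage="any", checkpoint_path_override="./my_model")
```

upload_model is interactive (it prompts before uploading) and both calls need valid credentials, so treat this as an illustration of the new signatures rather than a ready-made script.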
Binary files changed (compiled .pyc caches listed above); no textual diff.
clarifai/runners/dockerfile_template/Dockerfile.debug
ADDED
@@ -0,0 +1,11 @@
+FROM --platform=$TARGETPLATFORM public.ecr.aws/docker/library/python:3.12-slim-bookworm as pybase
+
+#############################
+# Final runtime image
+#############################
+FROM --platform=$TARGETPLATFORM ${RUNTIME_IMAGE} as final
+
+COPY --from=pybase --link=true /usr/bin/ls /usr/bin/cat /usr/bin/which /usr/bin/bash /usr/bin/sort /usr/bin/du /usr/bin/
+COPY --from=pybase --link=true /bin/rbash /bin/sh /bin/rm /bin/
+COPY --from=pybase --link=true /lib/*-linux-gnu/libselinux.so.1 /lib/*-linux-gnu/libpcre2-8.so.0 /lib/x86_64-linux-gnu/
+COPY --from=pybase --link=true /lib/*-linux-gnu/libselinux.so.1 /lib/*-linux-gnu/libpcre2-8.so.0 /lib/aarch64-linux-gnu/
clarifai/runners/dockerfile_template/Dockerfile.debug~
ADDED
@@ -0,0 +1,9 @@
+FROM --platform=$TARGETPLATFORM public.ecr.aws/docker/library/python:3.12-slim-bookworm as pybase
+
+#############################
+# Final runtime image
+#############################
+FROM --platform=$TARGETPLATFORM ${RUNTIME_IMAGE} as final
+
+COPY --from=pybase --link=true /usr/bin/ls /usr/bin/cat /usr/bin/which /usr/bin/bash /usr/bin/sort /usr/bin/du /usr/bin/
+COPY --from=pybase --link=true /bin/rbash /bin/sh /bin/rm /bin/
clarifai/runners/dockerfile_template/Dockerfile.template
CHANGED
@@ -47,9 +47,9 @@ COPY --link=true --from=downloader /venv /venv
 COPY --chown=nonroot:nonroot downloader/unused.yaml /home/nonroot/main/1/checkpoints/.cache/unused.yaml

 #####
-# Download checkpoints
+# Download checkpoints if config.yaml has checkpoints.when = "build"
 COPY --link=true config.yaml /home/nonroot/main/
-RUN ["python", "-m", "clarifai.cli", "model", "download-checkpoints", "--model_path", "/home/nonroot/main", "--out_path", "/home/nonroot/main"]
+RUN ["python", "-m", "clarifai.cli", "model", "download-checkpoints", "--model_path", "/home/nonroot/main", "--out_path", "/home/nonroot/main", "--stage", "build"]
 #####


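The build-time download is now gated on the new `when` key in the checkpoints section of config.yaml. A hedged sketch of such a section and of the gate the RUN step applies (the repo_id value is a placeholder; type, repo_id, hf_token and when are the keys this diff shows):

```python
# Sketch: a config.yaml checkpoints section using the new "when" key.
import yaml  # PyYAML

config = yaml.safe_load("""
checkpoints:
  type: huggingface             # only the huggingface loader is supported
  repo_id: some-org/some-model  # placeholder Hugging Face repo
  when: runtime                 # one of: upload, build, runtime
""")

when = config["checkpoints"]["when"]
# The `RUN ... --stage build` step above only downloads when this is "build";
# with "runtime" the image stays small and checkpoints are fetched in load_model.
print("download during docker build:", when == "build")
```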
Binary files changed (compiled .pyc caches listed above); no textual diff.
clarifai/runners/models/model_builder.py
CHANGED
@@ -16,9 +16,9 @@ from rich.markup import escape

 from clarifai.client import BaseClient
 from clarifai.runners.models.model_class import ModelClass
-from clarifai.runners.utils.const import (
-    AVAILABLE_PYTHON_IMAGES, AVAILABLE_TORCH_IMAGES, CONCEPTS_REQUIRED_MODEL_TYPE,
-    DEFAULT_PYTHON_VERSION, PYTHON_BASE_IMAGE, TORCH_BASE_IMAGE)
+from clarifai.runners.utils.const import (
+    AVAILABLE_PYTHON_IMAGES, AVAILABLE_TORCH_IMAGES, CONCEPTS_REQUIRED_MODEL_TYPE,
+    DEFAULT_DOWNLOAD_CHECKPOINT_WHEN, DEFAULT_PYTHON_VERSION, PYTHON_BASE_IMAGE, TORCH_BASE_IMAGE)
 from clarifai.runners.utils.loader import HuggingFaceLoader
 from clarifai.urls.helper import ClarifaiUrlHelper
 from clarifai.utils.logging import logger
@@ -30,19 +30,6 @@ dependencies = [
     'torch',
     'clarifai',
 ]
-# Escape dependency names for regex
-dep_pattern = '|'.join(map(re.escape, dependencies))
-# All possible version specifiers
-version_specifiers = '==|>=|<=|!=|~=|>|<'
-# Compile a regex pattern with verbose mode for readability
-pattern = re.compile(r"""
-    ^\s*                                  # Start of line, optional whitespace
-    (?P<dependency>""" + dep_pattern + r""")  # Dependency name
-    \s*                                   # Optional whitespace
-    (?P<specifier>""" + version_specifiers + r""")?  # Optional version specifier
-    \s*                                   # Optional whitespace
-    (?P<version>[^\s;]+)?                 # Optional version (up to space or semicolon)
-""", re.VERBOSE)


 def _clear_line(n: int = 1) -> None:
@@ -158,11 +145,25 @@ class ModelBuilder:
     :return: repo_id location of checkpoint.
     :return: hf_token token to access checkpoint.
     """
+    if "checkpoints" not in self.config:
+      return None, None, None, DEFAULT_DOWNLOAD_CHECKPOINT_WHEN
     assert "type" in self.config.get("checkpoints"), "No loader type specified in the config file"
     loader_type = self.config.get("checkpoints").get("type")
     if not loader_type:
       logger.info("No loader type specified in the config file for checkpoints")
       return None, None, None
+    checkpoints = self.config.get("checkpoints")
+    if 'when' not in checkpoints:
+      logger.warn(
+          f"No 'when' specified in the config file for checkpoints, defaulting to download at {DEFAULT_DOWNLOAD_CHECKPOINT_WHEN}"
+      )
+    when = checkpoints.get("when", DEFAULT_DOWNLOAD_CHECKPOINT_WHEN)
+    # In the config.yaml we don't allow "any", that's only used in download_checkpoints to force download.
+    assert when in [
+        "upload",
+        "build",
+        "runtime",
+    ], "Invalid value for when in the checkpoint loader when, needs to be one of ['upload', 'build', 'runtime']"
     assert loader_type == "huggingface", "Only huggingface loader supported for now"
     if loader_type == "huggingface":
       assert "repo_id" in self.config.get("checkpoints"), "No repo_id specified in the config file"
@@ -170,7 +171,7 @@

     # get from config.yaml otherwise fall back to HF_TOKEN env var.
     hf_token = self.config.get("checkpoints").get("hf_token", os.environ.get("HF_TOKEN", None))
-    return loader_type, repo_id, hf_token
+    return loader_type, repo_id, hf_token, when

   def _check_app_exists(self):
     resp = self.client.STUB.GetApp(service_pb2.GetAppRequest(user_app_id=self.client.user_app_id))
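_validate_config_checkpoints now returns a four-tuple, so every caller also unpacks the `when` value. A minimal restatement of the contract with a hypothetical standalone helper (the helper name and the "runtime" default are illustrative; the real default comes from DEFAULT_DOWNLOAD_CHECKPOINT_WHEN, whose value is not visible in this diff):

```python
# Illustrative restatement of the new (loader_type, repo_id, hf_token, when) contract.
from typing import Optional, Tuple

def validate_checkpoints_config(config: dict,
                                default_when: str = "runtime") -> Tuple[Optional[str], Optional[str], Optional[str], str]:
  checkpoints = config.get("checkpoints")
  if not checkpoints:
    # No checkpoints section: nothing to download, use the default stage.
    return None, None, None, default_when
  when = checkpoints.get("when", default_when)
  assert when in ("upload", "build", "runtime"), "when must be upload, build or runtime"
  return checkpoints.get("type"), checkpoints.get("repo_id"), checkpoints.get("hf_token"), when

# Callers now unpack four values:
# loader_type, repo_id, hf_token, when = validate_checkpoints_config(config)
```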
@@ -215,7 +216,7 @@
     assert model_type_id in CONCEPTS_REQUIRED_MODEL_TYPE, f"Model type {model_type_id} not supported for concepts"

     if self.config.get("checkpoints"):
-      loader_type, _, hf_token = self._validate_config_checkpoints()
+      loader_type, _, hf_token, _ = self._validate_config_checkpoints()

       if loader_type == "huggingface" and hf_token:
         is_valid_token = HuggingFaceLoader.validate_hftoken(hf_token)
@@ -310,25 +311,41 @@
     )
     return self.client.STUB.PostModels(request)

+  def _match_req_line(self, line):
+    line = line.strip()
+    if not line or line.startswith('#'):
+      return None, None
+    # split on whitespace followed by #
+    line = re.split(r'\s+#', line)[0]
+    if "==" in line:
+      pkg, version = line.split("==")
+    elif ">=" in line:
+      pkg, version = line.split(">=")
+    elif ">" in line:
+      pkg, version = line.split(">")
+    elif "<=" in line:
+      pkg, version = line.split("<=")
+    elif "<" in line:
+      pkg, version = line.split("<")
+    else:
+      pkg, version = line, None  # No version specified
+    for dep in dependencies:
+      if dep == pkg:
+        if dep == 'torch' and line.find(
+            'whl/cpu') > 0:  # Ignore torch-cpu whl files, use base mage.
+          return None, None
+        return dep.strip(), version.strip() if version else None
+    return None, None
+
   def _parse_requirements(self):
     dependencies_version = {}
     with open(os.path.join(self.folder, 'requirements.txt'), 'r') as file:
       for line in file:
         # Skip empty lines and comments
-        line = line.strip()
-        if not line or line.startswith('#'):
+        dependency, version = self._match_req_line(line)
+        if dependency is None:
           continue
-
-        line = re.split(r'\s+#', line)[0]
-        match = pattern.match(line)
-        if match:
-          dependency = match.group('dependency')
-          version = match.group('version')
-          if dependency == "torch" and line.find(
-              'whl/cpu') > 0:  # Ignore torch-cpu whl files, use base mage.
-            continue
-
-          dependencies_version[dependency] = version if version else None
+        dependencies_version[dependency] = version if version else None
     return dependencies_version

   def create_dockerfile(self):
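The regex-based requirements parsing is replaced by a plain specifier split in the new _match_req_line helper. A standalone sketch of that behaviour (not the library's exact code), with sample lines; one observable difference worth noting is that the old regex also recognised ~= and != specifiers, which now fall through as unmatched:

```python
# Standalone sketch of the new requirement-line matching.
# `dependencies` mirrors the module-level allowlist in model_builder.py.
import re

dependencies = ['torch', 'clarifai']

def match_req_line(line):
  line = line.strip()
  if not line or line.startswith('#'):
    return None, None
  line = re.split(r'\s+#', line)[0]  # drop trailing comments
  for spec in ("==", ">=", ">", "<=", "<"):
    if spec in line:
      pkg, version = line.split(spec)
      break
  else:
    pkg, version = line, None  # no specifier found
  for dep in dependencies:
    if dep == pkg and not (dep == 'torch' and 'whl/cpu' in line):
      return dep.strip(), version.strip() if version else None
  return None, None

print(match_req_line("torch==2.1.0"))    # ('torch', '2.1.0')
print(match_req_line("clarifai"))        # ('clarifai', None) -> later pinned to CLIENT_VERSION
print(match_req_line("numpy>=1.26"))     # (None, None), not in the allowlist
print(match_req_line("clarifai~=11.1"))  # (None, None), '~=' is not split
```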
@@ -395,10 +412,8 @@
     with open(os.path.join(self.folder, 'requirements.txt'), 'r') as file:
       for line in file:
         # if the line without whitespace is "clarifai"
-
-        matchline = line.strip()
-        match = pattern.match(matchline)
-        if match and match.group('dependency') == "clarifai":
+        dependency, version = self._match_req_line(line)
+        if dependency and dependency == "clarifai":
           lines.append(line.replace("clarifai", f"clarifai=={CLIENT_VERSION}"))
         else:
           lines.append(line)
@@ -433,10 +448,13 @@
   def tar_file(self):
     return f"{self.folder}.tar.gz"

-  def download_checkpoints(self, checkpoint_path_override: str = None):
+  def download_checkpoints(self, stage: str, checkpoint_path_override: str = None):
     """
     Downloads the checkpoints specified in the config file.

+    :param stage: The stage of the build process. This is used to determine when to download the
+    checkpoints. The stage can be one of ['build', 'upload', 'runtime', 'any']. If "any" it will always try to download
+    regardless of what is specified in config.yaml. Otherwise it must match what is in config.yaml
     :param checkpoint_path_override: The path to download the checkpoints to. If not provided, the
     default path is used based on the folder ModelUploader was initialized with. The
     checkpoint_suffix will be appended to the path.
@@ -445,7 +463,14 @@
       logger.info("No checkpoints specified in the config file")
       return True

-    loader_type, repo_id, hf_token = self._validate_config_checkpoints()
+    loader_type, repo_id, hf_token, when = self._validate_config_checkpoints()
+    if stage not in ["build", "upload", "runtime", "any"]:
+      raise Exception("Invalid stage provided, must be one of ['build', 'upload', 'runtime']")
+    if when != stage and stage != "any":
+      logger.info(
+          f"Skipping downloading checkpoints for stage {stage} since config.yaml says to download them at stage {when}"
+      )
+      return True  # success seems fine here.

     success = True
     if loader_type == "huggingface":
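The gating rule that download_checkpoints now applies is small enough to state directly; an illustrative helper (not part of the library):

```python
# Illustrative helper: should download_checkpoints(stage=...) actually fetch anything,
# given the `when` value read from config.yaml?
def should_download(stage: str, when: str) -> bool:
  if stage not in ("build", "upload", "runtime", "any"):
    raise ValueError("stage must be one of build/upload/runtime/any")
  # "any" always downloads; otherwise the caller's stage must match config.yaml.
  return stage == "any" or stage == when

assert should_download("build", "build") is True
assert should_download("upload", "runtime") is False  # skipped, still reported as success
assert should_download("any", "runtime") is True      # forced download
```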
@@ -519,11 +544,12 @@
         self._concepts_protos_from_concepts(labels))
     return model_version_proto

-  def upload_model_version(self, download_checkpoints):
+  def upload_model_version(self):
     file_path = f"{self.folder}.tar.gz"
     logger.debug(f"Will tar it into file: {file_path}")

     model_type_id = self.config.get('model').get('model_type_id')
+    loader_type, repo_id, hf_token, when = self._validate_config_checkpoints()

     if (model_type_id in CONCEPTS_REQUIRED_MODEL_TYPE) and 'concepts' not in self.config:
       logger.info(
@@ -533,15 +559,13 @@
       logger.info(
           "Checkpoints specified in the config.yaml file, will download the HF model's config.json file to infer the concepts."
       )
-
-      if not download_checkpoints and not HuggingFaceLoader.validate_config(self.checkpoint_path):
-
-        input(
-            "Press Enter to download the HuggingFace model's config.json file to infer the concepts and continue..."
-        )
-
-      loader_type, repo_id, hf_token = self._validate_config_checkpoints()
-      if loader_type == "huggingface":
+      # If we don't already have the concepts, download the config.json file from HuggingFace
+      if loader_type == "huggingface":
+        # If the config.yaml says we'll download in the future (build time or runtime) then we need to get this config now.
+        if when != "upload" and not HuggingFaceLoader.validate_config(self.checkpoint_path):
+          input(
+              "Press Enter to download the HuggingFace model's config.json file to infer the concepts and continue..."
+          )
         loader = HuggingFaceLoader(repo_id=repo_id, token=hf_token)
         loader.download_config(self.checkpoint_path)
@@ -556,7 +580,7 @@
     def filter_func(tarinfo):
       name = tarinfo.name
       exclude = [self.tar_file, "*~"]
-      if not download_checkpoints:
+      if when != "upload":
         exclude.append(self.checkpoint_suffix)
       return None if any(name.endswith(ex) for ex in exclude) else tarinfo

@@ -568,12 +592,12 @@
     logger.debug(f"Size of the tar is: {file_size} bytes")

     self.storage_request_size = self._get_tar_file_content_size(file_path)
-    if not download_checkpoints and self.config.get("checkpoints"):
+    if when != "upload" and self.config.get("checkpoints"):
       # Get the checkpoint size to add to the storage request.
       # First check for the env variable, then try querying huggingface. If all else fails, use the default.
       checkpoint_size = os.environ.get('CHECKPOINT_SIZE_BYTES', 0)
       if not checkpoint_size:
-        _, repo_id, _ = self._validate_config_checkpoints()
+        _, repo_id, _, _ = self._validate_config_checkpoints()
         checkpoint_size = HuggingFaceLoader.get_huggingface_checkpoint_total_size(repo_id)
       if not checkpoint_size:
         checkpoint_size = self.DEFAULT_CHECKPOINT_SIZE
@@ -701,10 +725,16 @@
     return False


-def upload_model(folder, download_checkpoints, skip_dockerfile):
+def upload_model(folder, stage, skip_dockerfile):
+  """
+  Uploads a model to Clarifai.
+
+  :param folder: The folder containing the model files.
+  :param stage: The stage of when you're uploading this model. This is used to determine when to download the checkpoints based on a match with the "when" field in the config.yaml checkpoints section or if you set stage to "any" it will always download the checkpoints.
+  :param skip_dockerfile: If True, skips creating the Dockerfile so you can re-use the local one.
+  """
   builder = ModelBuilder(folder)
-  if download_checkpoints:
-    builder.download_checkpoints()
+  builder.download_checkpoints(stage=stage)
   if not skip_dockerfile:
     builder.create_dockerfile()
   exists = builder.check_model_exists()
@@ -716,4 +746,4 @@ def upload_model(folder, download_checkpoints, skip_dockerfile):
   logger.info(f"New model will be created at {builder.model_url} with it's first version.")

   input("Press Enter to continue...")
-  builder.upload_model_version(download_checkpoints)
+  builder.upload_model_version()
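Taken together, everything that used to depend on the --download_checkpoints flag is now derived from `when`; a compact, illustrative summary (it assumes a checkpoints section is configured and that the upload is driven by the CLI, i.e. stage="upload"):

```python
# Illustrative summary of how `when` from config.yaml drives the upload path.
def upload_behaviour(when: str) -> dict:
  bundled = (when == "upload")  # checkpoints fetched and tarred only at upload time
  return {
      "download_before_upload": bundled,                # upload_model(..., "upload", ...) matches `when`
      "checkpoints_in_uploaded_tar": bundled,           # filter_func excludes them otherwise
      "checkpoint_size_added_to_storage": not bundled,  # padded so they can be fetched later
  }

for w in ("upload", "build", "runtime"):
  print(w, upload_behaviour(w))
```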
clarifai/runners/models/model_run_locally.py
CHANGED
@@ -475,13 +475,14 @@ def main(model_path,
          keep_env=False,
          keep_image=False):

-  if not os.environ["CLARIFAI_PAT"]:
+  if not os.environ.get("CLARIFAI_PAT", None):
     logger.error(
         "CLARIFAI_PAT environment variable is not set! Please set your PAT in the 'CLARIFAI_PAT' environment variable."
     )
     sys.exit(1)
   manager = ModelRunLocally(model_path)
-  manager.builder.download_checkpoints()
+  # stage="any" forces downloaded now regardless of config.yaml
+  manager.builder.download_checkpoints(stage="any")
   if inside_container:
     if not manager.is_docker_installed():
       sys.exit(1)