clarifai 10.10.0__py3-none-any.whl → 10.11.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/cli/model.py +7 -2
- clarifai/client/dataset.py +21 -0
- clarifai/client/deployment.py +2 -1
- clarifai/client/input.py +27 -0
- clarifai/client/model.py +1 -1
- clarifai/datasets/export/inputs_annotations.py +8 -0
- clarifai/datasets/upload/multimodal.py +0 -2
- clarifai/runners/dockerfile_template/{Dockerfile.cpu.template → Dockerfile.template} +21 -11
- clarifai/runners/models/base_typed_model.py +6 -3
- clarifai/runners/models/model_run_locally.py +24 -9
- clarifai/runners/models/model_upload.py +101 -13
- clarifai/runners/utils/url_fetcher.py +19 -12
- {clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/METADATA +16 -16
- {clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/RECORD +19 -20
- {clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/WHEEL +1 -1
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -83
- {clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/LICENSE +0 -0
- {clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/entry_points.txt +0 -0
- {clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/top_level.txt +0 -0
clarifai/__init__.py
CHANGED
@@ -1 +1 @@
-__version__ = "10.10.0"
+__version__ = "10.11.0"
clarifai/cli/model.py
CHANGED
@@ -39,11 +39,16 @@ def upload(model_path, download_checkpoints, skip_dockerfile):
     type=click.Path(exists=True),
     required=True,
     help='Path to the model directory.')
-def test_locally(model_path):
+@click.option(
+    '--keep_env',
+    is_flag=True,
+    help='Flag to keep the virtual environment after testing the model\
+    locally. Defaults to False, which will delete the virtual environment after testing.')
+def test_locally(model_path, keep_env=False):
   """Test model locally."""
   try:
     from clarifai.runners.models import model_run_locally
-    model_run_locally.main(model_path)
+    model_run_locally.main(model_path, keep_env=keep_env)
     click.echo(f"Model tested locally from {model_path}.")
   except Exception as e:
     click.echo(f"Failed to test model locally: {e}", err=True)
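The new `--keep_env` flag is simply threaded through to `model_run_locally.main`. A minimal sketch of driving the same helper directly from Python (the model directory path is a placeholder):

```python
from clarifai.runners.models import model_run_locally

# keep_env=True preserves the temporary virtual environment created for the test
# run instead of deleting it afterwards, so repeated runs can reuse it.
model_run_locally.main("./my_model_dir", keep_env=True)
```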
clarifai/client/dataset.py
CHANGED
@@ -9,6 +9,7 @@ from typing import Dict, Generator, List, Optional, Tuple, Type, TypeVar, Union
 
 import requests
 from clarifai_grpc.grpc.api import resources_pb2, service_pb2
+from clarifai_grpc.grpc.api.resources_pb2 import Input
 from clarifai_grpc.grpc.api.service_pb2 import MultiInputResponse
 from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
 from google.protobuf.json_format import MessageToDict
@@ -190,6 +191,26 @@ class Dataset(Lister, BaseClient):
     }
     yield Dataset.from_auth_helper(self.auth_helper, **kwargs)
 
+  def list_inputs(self, page_no: int = None, per_page: int = None,
+                  input_type: str = None) -> Generator[Input, None, None]:
+    """Lists all the inputs for the dataset.
+
+    Args:
+        page_no (int): The page number to list.
+        per_page (int): The number of items per page.
+        input_type (str): The type of input to list. Options: 'image', 'video', 'audio', 'text'.
+
+    Yields:
+        Input: Input objects in the dataset.
+
+    Example:
+        >>> from clarifai.client.dataset import Dataset
+        >>> dataset = Dataset(dataset_id='dataset_id', user_id='user_id', app_id='app_id')
+        >>> all_dataset_inputs = list(dataset.list_inputs())
+    """
+    return self.input_object.list_inputs(
+        dataset_id=self.id, page_no=page_no, per_page=per_page, input_type=input_type)
+
   def __iter__(self):
     return iter(DatasetExportReader(archive_url=self.archive_zip()))
 
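`Dataset.list_inputs` is a thin wrapper that delegates to the `Inputs` client scoped to this dataset. A minimal usage sketch (all ids are placeholders and a valid `CLARIFAI_PAT` is assumed):

```python
from clarifai.client.dataset import Dataset

dataset = Dataset(dataset_id="my-dataset", user_id="my-user", app_id="my-app")

# Lazily stream only the image inputs, 50 per page; omit input_type to get everything.
for inp in dataset.list_inputs(per_page=50, input_type="image"):
    print(inp.id)
```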
clarifai/client/deployment.py
CHANGED
@@ -49,7 +49,8 @@ class Deployment(Lister, BaseClient):
     Returns:
         resources_pb2.RunnerSelector: A RunnerSelector object for the given deployment_id.
     """
-    return resources_pb2.RunnerSelector(deployment=resources_pb2.Deployment(id=deployment_id, user_id=user_id))
+    return resources_pb2.RunnerSelector(
+        deployment=resources_pb2.Deployment(id=deployment_id, user_id=user_id))
 
   def __getattr__(self, name):
     return getattr(self.deployment_info, name)
clarifai/client/input.py
CHANGED
@@ -867,6 +867,33 @@ class Inputs(Lister, BaseClient):
       raise Exception(response.status)
     self.logger.info("\nInputs Deleted\n%s", response.status)
 
+  def delete_annotations(self, input_ids: List[str], annotation_ids: List[str] = []) -> None:
+    """Delete list of annotations of input objects from the app.
+
+    Args:
+        input_ids (Input): List of input objects for which annotations to delete.
+        annotation_ids (List[str]): List of annotation ids to delete.
+
+    Example:
+        >>> from clarifai.client.user import User
+        >>> input_obj = User(user_id="user_id").app(app_id="app_id").inputs()
+        >>> input_obj.delete_annotations(input_ids=['input_id_1', 'input_id_2'])
+
+    Note:
+        'annotation_ids' are optional, but if they are provided, the number and order in
+        'annotation_ids' and 'input_ids' should match.
+    """
+    if not isinstance(input_ids, list):
+      raise UserError("input_ids must be a list of input ids")
+    if annotation_ids and len(input_ids) != len(annotation_ids):
+      raise UserError("Number of provided annotation_ids and input_ids should match.")
+    request = service_pb2.DeleteAnnotationsRequest(
+        user_app_id=self.user_app_id, ids=annotation_ids, input_ids=input_ids)
+    response = self._grpc_request(self.STUB.DeleteAnnotations, request)
+    if response.status.code != status_code_pb2.SUCCESS:
+      raise Exception(response.status)
+    self.logger.info("\nAnnotations Deleted\n%s", response.status)
+
   def download_inputs(self, inputs: List[Input]) -> List[bytes]:
     """Download list of input objects from the app.
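When `annotation_ids` is supplied it must be the same length and order as `input_ids` (one annotation per listed input), as enforced by the check above. A hedged usage sketch with placeholder ids:

```python
from clarifai.client.user import User

input_obj = User(user_id="user_id").app(app_id="app_id").inputs()

# Delete one specific annotation per input; omit annotation_ids to delete by input only.
input_obj.delete_annotations(
    input_ids=["input_id_1", "input_id_2"],
    annotation_ids=["annotation_id_1", "annotation_id_2"])
```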
clarifai/client/model.py
CHANGED
clarifai/datasets/export/inputs_annotations.py
CHANGED
@@ -165,6 +165,14 @@ class InputAnnotationDownloader:
   def _save_annotation_to_archive(self, new_archive: zipfile.ZipFile, annot_data: List[Dict],
                                   file_name: str) -> None:
     """Gets the annotation response bytestring (from requests) and append to zip file."""
+    # Fill zero values for missing bounding box keys
+    for annot in annot_data:
+      if annot.get('regionInfo') and annot['regionInfo'].get('boundingBox'):
+        bbox = annot['regionInfo']['boundingBox']
+        bbox.setdefault('topRow', 0)
+        bbox.setdefault('leftCol', 0)
+        bbox.setdefault('bottomRow', 0)
+        bbox.setdefault('rightCol', 0)
     # Serialize the dictionary to a JSON string
     json_str = json.dumps(annot_data)
     # Convert the JSON string to bytes
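The exported annotation JSON can omit bounding-box coordinates whose value is zero (protobuf's JSON form drops default-valued fields), so the downloader now backfills them before serializing. A standalone illustration (the sample annotation dict is made up):

```python
# dict.setdefault only adds a key when it is missing, so present values are untouched.
annot = {"regionInfo": {"boundingBox": {"topRow": 0.1, "rightCol": 0.8}}}

bbox = annot["regionInfo"]["boundingBox"]
for key in ("topRow", "leftCol", "bottomRow", "rightCol"):
    bbox.setdefault(key, 0)

print(bbox)  # {'topRow': 0.1, 'rightCol': 0.8, 'leftCol': 0, 'bottomRow': 0}
```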
clarifai/datasets/upload/multimodal.py
CHANGED
@@ -6,7 +6,6 @@ from google.protobuf.struct_pb2 import Struct
 
 from clarifai.client.input import Inputs
 from clarifai.datasets.upload.base import ClarifaiDataLoader, ClarifaiDataset
-from clarifai.utils.misc import get_uuid
 
 
 class MultiModalDataset(ClarifaiDataset):
@@ -36,7 +35,6 @@ class MultiModalDataset(ClarifaiDataset):
     image_bytes = data_item.image_bytes
     text = data_item.text
     labels = data_item.labels if isinstance(data_item.labels, list) else [data_item.labels]
-    id = get_uuid(8)
     input_id = f"{self.dataset_id}-{id}" if data_item.id is None else f"{self.dataset_id}-{str(data_item.id)}"
     if data_item.metadata is not None:
       metadata.update(data_item.metadata)
clarifai/runners/dockerfile_template/{Dockerfile.cpu.template → Dockerfile.template}
RENAMED
@@ -1,16 +1,16 @@
-
-FROM public.ecr.aws/docker/library/python:${PYTHON_VERSION}-slim-bookworm as build
+FROM --platform=$TARGETPLATFORM ${BASE_IMAGE} as build
 
-
-WORKDIR /app
+ENV DEBIAN_FRONTEND=noninteractive
 
+#############################
+# User specific requirements
+#############################
 COPY requirements.txt .
-# Install requirements and cleanup before leaving this line.
-# Note(zeiler): this could be in a future template as {{model_python_deps}}
-RUN python -m pip install -r requirements.txt && rm -rf /root/.cache
 
-# Install
-
+# Install requirements and clarifai package and cleanup before leaving this line.
+# Note(zeiler): this could be in a future template as {{model_python_deps}}
+RUN pip install --no-cache-dir -r requirements.txt && \
+    pip install --no-cache-dir clarifai
 
 # These will be set by the templaing system.
 ENV CLARIFAI_PAT=${CLARIFAI_PAT}
@@ -20,12 +20,22 @@ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
 ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
 ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
 
+# Set the NUMBA cache dir to /tmp
+ENV NUMBA_CACHE_DIR=/tmp/numba_cache
+ENV HOME=/tmp
+
+# Set the working directory to /app
+WORKDIR /app
+
 # Copy the current folder into /app/model_dir that the SDK will expect.
+# Note(zeiler): would be nice to exclude checkpoints in case they were pre-downloaded.
 COPY . /app/model_dir/${name}
 
 # Add the model directory to the python path.
-ENV PYTHONPATH
+ENV PYTHONPATH=${PYTHONPATH}:/app/model_dir/${name}
+
+ENTRYPOINT ["python", "-m", "clarifai.runners.server"]
 
 # Finally run the clarifai entrypoint to start the runner loop and local dev server.
 # Note(zeiler): we may want to make this a clarifai CLI call.
-CMD ["
+CMD ["--model_path", "/app/model_dir/main"]
clarifai/runners/models/base_typed_model.py
CHANGED
@@ -24,7 +24,8 @@ class AnyAnyModel(ModelRunner):
     list_input_dict = [
         InputDataHandler.from_proto(input).to_python() for input in input_request.inputs
     ]
-    inference_params = json_format.MessageToDict(
+    inference_params = json_format.MessageToDict(
+        input_request.model.model_version.output_info.params)
 
     return list_input_dict, inference_params
 
@@ -141,7 +142,8 @@ class VisualInputModel(AnyAnyModel):
     list_input_dict = [
         InputDataHandler.from_proto(input).image(format="np") for input in input_request.inputs
     ]
-    inference_params = json_format.MessageToDict(
+    inference_params = json_format.MessageToDict(
+        input_request.model.model_version.output_info.params)
 
     return list_input_dict, inference_params
 
@@ -181,7 +183,8 @@ class TextInputModel(AnyAnyModel):
   def parse_input_request(
       self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
     list_input_text = [InputDataHandler.from_proto(input).text for input in input_request.inputs]
-    inference_params = json_format.MessageToDict(
+    inference_params = json_format.MessageToDict(
+        input_request.model.model_version.output_info.params)
 
     return list_input_text, inference_params
 
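All three parsers convert the request's `output_info.params` protobuf `Struct` into a plain dict with `json_format.MessageToDict`. A self-contained illustration with made-up parameters (note that `Struct` stores all numbers as doubles):

```python
from google.protobuf import json_format
from google.protobuf.struct_pb2 import Struct

params = Struct()
params.update({"temperature": 0.7, "max_tokens": 256})

inference_params = json_format.MessageToDict(params)
print(inference_params)  # e.g. {'temperature': 0.7, 'max_tokens': 256.0}
```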
clarifai/runners/models/model_run_locally.py
CHANGED
@@ -1,3 +1,4 @@
+import hashlib
 import importlib.util
 import inspect
 import os
@@ -13,6 +14,7 @@ from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
 from clarifai_protocol import BaseRunner
 
 from clarifai.runners.models.model_upload import ModelUploader
+from clarifai.runners.utils.url_fetcher import ensure_urls_downloaded
 from clarifai.utils.logging import logger
 
 
@@ -24,17 +26,26 @@ class ModelRunLocally:
 
   def create_temp_venv(self):
     """Create a temporary virtual environment."""
-
-
+    with open(self.requirements_file, "r") as f:
+      requirements_hash = hashlib.md5(f.read().encode('utf-8')).hexdigest()
+
+    temp_dir = os.path.join(tempfile.gettempdir(), str(requirements_hash))
     venv_dir = os.path.join(temp_dir, "venv")
-
+
+    if os.path.exists(temp_dir):
+      logger.info(f"Using previous virtual environment at {temp_dir}")
+      use_existing_venv = True
+    else:
+      logger.info("Creating temporary virtual environment...")
+      use_existing_venv = False
+      venv.create(venv_dir, with_pip=True)
+      logger.info(f"Created temporary virtual environment at {venv_dir}")
 
     self.venv_dir = venv_dir
     self.temp_dir = temp_dir
     self.python_executable = os.path.join(venv_dir, "bin", "python")
 
-
-    return venv_dir, temp_dir
+    return use_existing_venv
 
   def install_requirements(self):
     """Install the dependencies from requirements.txt and Clarifai."""
@@ -102,6 +113,8 @@ class ModelRunLocally:
     """Perform inference using the runner."""
     request = self._build_request()
 
+    ensure_urls_downloaded(request)
+
     try:
       return runner.predict(request)
     except Exception as e:
@@ -175,16 +188,18 @@ class ModelRunLocally:
     shutil.rmtree(self.temp_dir)
 
 
-def main(model_path, run_model_server=False):
+def main(model_path, run_model_server=False, keep_env=False):
 
   manager = ModelRunLocally(model_path)
-  manager.create_temp_venv()
+  use_existing_env = manager.create_temp_venv()
 
   try:
-    manager.install_requirements()
+    if not use_existing_env:
+      manager.install_requirements()
     if run_model_server:
      manager.run_model_server()
    else:
      manager.test_model()
  finally:
-    manager.clean_up()
+    if not keep_env:
+      manager.clean_up()
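`create_temp_venv` now keys the temp directory on an MD5 hash of `requirements.txt`, so an environment is only built once per unique set of requirements and can be reused across runs (and preserved with `keep_env`). A standalone sketch of that caching idea, not the SDK code itself:

```python
import hashlib
import os
import tempfile
import venv

def get_or_create_venv(requirements_file: str) -> str:
    # Identical requirements.txt contents hash to the same cache directory.
    with open(requirements_file, "r") as f:
        requirements_hash = hashlib.md5(f.read().encode("utf-8")).hexdigest()

    temp_dir = os.path.join(tempfile.gettempdir(), requirements_hash)
    venv_dir = os.path.join(temp_dir, "venv")

    if not os.path.exists(temp_dir):
        # First run for these requirements: build the environment (with pip available).
        venv.create(venv_dir, with_pip=True)
    return venv_dir
```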
clarifai/runners/models/model_upload.py
CHANGED
@@ -1,4 +1,5 @@
 import os
+import re
 import time
 from string import Template
 
@@ -23,6 +24,44 @@ def _clear_line(n: int = 1) -> None:
 
 class ModelUploader:
   DEFAULT_PYTHON_VERSION = 3.11
+  DEFAULT_TORCH_VERSION = '2.4.0'
+  DEFAULT_CUDA_VERSION = '124'
+  # List of available torch images for matrix
+  '''
+  python_version: ['3.8', '3.9', '3.10', '3.11']
+  torch_version: ['2.0.0', '2.1.0', '2.2.0', '2.3.0', '2.4.0', '2.4.1', '2.5.0']
+  cuda_version: ['124']
+  '''
+  AVAILABLE_TORCH_IMAGES = [
+      '2.0.0-py3.8-cuda124',
+      '2.0.0-py3.9-cuda124',
+      '2.0.0-py3.10-cuda124',
+      '2.0.0-py3.11-cuda124',
+      '2.1.0-py3.8-cuda124',
+      '2.1.0-py3.9-cuda124',
+      '2.1.0-py3.10-cuda124',
+      '2.1.0-py3.11-cuda124',
+      '2.2.0-py3.8-cuda124',
+      '2.2.0-py3.9-cuda124',
+      '2.2.0-py3.10-cuda124',
+      '2.2.0-py3.11-cuda124',
+      '2.3.0-py3.8-cuda124',
+      '2.3.0-py3.9-cuda124',
+      '2.3.0-py3.10-cuda124',
+      '2.3.0-py3.11-cuda124',
+      '2.4.0-py3.8-cuda124',
+      '2.4.0-py3.9-cuda124',
+      '2.4.0-py3.10-cuda124',
+      '2.4.0-py3.11-cuda124',
+      '2.4.1-py3.8-cuda124',
+      '2.4.1-py3.9-cuda124',
+      '2.4.1-py3.10-cuda124',
+      '2.4.1-py3.11-cuda124',
+  ]
+  AVAILABLE_PYTHON_IMAGES = ['3.8', '3.9', '3.10', '3.11', '3.12', '3.13']
+  PYTHON_BASE_IMAGE = 'public.ecr.aws/clarifai-models/python-base:{python_version}'
+  TORCH_BASE_IMAGE = 'public.ecr.aws/clarifai-models/torch:{torch_version}-py{python_version}-cuda{cuda_version}'
+
   CONCEPTS_REQUIRED_MODEL_TYPE = [
       'visual-classifier', 'visual-detector', 'visual-segmenter', 'text-classifier'
   ]
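The hard-coded `AVAILABLE_TORCH_IMAGES` list is the cross product of the supported torch and Python versions for the single `cuda124` build (torch 2.5.0 appears in the matrix comment but not yet in the list). A small sketch that generates equivalent tags:

```python
torch_versions = ["2.0.0", "2.1.0", "2.2.0", "2.3.0", "2.4.0", "2.4.1"]
python_versions = ["3.8", "3.9", "3.10", "3.11"]
cuda_version = "124"

available_torch_images = [
    f"{torch}-py{python}-cuda{cuda_version}"
    for torch in torch_versions
    for python in python_versions
]
print(len(available_torch_images))  # 24 tags, matching the hard-coded list above
```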
@@ -87,7 +126,7 @@ class ModelUploader:
     user_id = model.get('user_id')
     app_id = model.get('app_id')
 
-    base = os.environ.get('CLARIFAI_API_BASE', 'https://api
+    base = os.environ.get('CLARIFAI_API_BASE', 'https://api.clarifai.com')
 
     self._client = BaseClient(user_id=user_id, app_id=app_id, base=base)
 
@@ -144,18 +183,46 @@ class ModelUploader:
     )
     return self.client.STUB.PostModels(request)
 
+  def _parse_requirements(self):
+    # parse the user's requirements.txt to determine the proper base image to build on top of, based on the torch and other large dependencies and their versions
+    # List of dependencies to look for
+    dependencies = [
+        'torch',
+    ]
+    # Escape dependency names for regex
+    dep_pattern = '|'.join(map(re.escape, dependencies))
+    # All possible version specifiers
+    version_specifiers = '==|>=|<=|!=|~=|>|<'
+    # Compile a regex pattern with verbose mode for readability
+    pattern = re.compile(r"""
+        ^\s*                              # Start of line, optional whitespace
+        (?P<dependency>""" + dep_pattern + r""")  # Dependency name
+        \s*                               # Optional whitespace
+        (?P<specifier>""" + version_specifiers + r""")?  # Optional version specifier
+        \s*                               # Optional whitespace
+        (?P<version>[^\s;]+)?             # Optional version (up to space or semicolon)
+    """, re.VERBOSE)
+
+    deendencies_version = {}
+    with open(os.path.join(self.folder, 'requirements.txt'), 'r') as file:
+      for line in file:
+        # Skip empty lines and comments
+        line = line.strip()
+        if not line or line.startswith('#'):
+          continue
+        match = pattern.match(line)
+        if match:
+          dependency = match.group('dependency')
+          version = match.group('version')
+          deendencies_version[dependency] = version if version else None
+    return deendencies_version
+
   def create_dockerfile(self):
-
-
-
-
-
-          'Dockerfile.cuda.template',
-      )
-    else:
-      dockerfile_template = os.path.join(
-          os.path.dirname(os.path.dirname(__file__)), 'dockerfile_template',
-          'Dockerfile.cpu.template')
+    dockerfile_template = os.path.join(
+        os.path.dirname(os.path.dirname(__file__)),
+        'dockerfile_template',
+        'Dockerfile.template',
+    )
 
     with open(dockerfile_template, 'r') as template_file:
       dockerfile_template = template_file.read()
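`_parse_requirements` only looks for a small set of heavyweight dependencies (currently just `torch`) and records any pinned version. A sketch of the same regex approach applied to a made-up `requirements.txt`:

```python
import re

requirements = """\
# comments and blank lines are skipped
torch==2.4.0
numpy>=1.22.0
"""

# Same idea as above: dependency name, optional specifier, optional version.
pattern = re.compile(
    r"^\s*(?P<dependency>torch)\s*(?P<specifier>==|>=|<=|!=|~=|>|<)?\s*(?P<version>[^\s;]+)?")

deps = {}
for line in requirements.splitlines():
    line = line.strip()
    if not line or line.startswith("#"):
        continue
    match = pattern.match(line)
    if match:
        deps[match.group("dependency")] = match.group("version")

print(deps)  # {'torch': '2.4.0'}
```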
@@ -166,6 +233,11 @@ class ModelUploader:
     build_info = self.config.get('build_info', {})
     if 'python_version' in build_info:
       python_version = build_info['python_version']
+      if python_version not in self.AVAILABLE_PYTHON_IMAGES:
+        logger.error(
+            f"Python version {python_version} not supported, please use one of the following versions: {self.AVAILABLE_PYTHON_IMAGES}"
+        )
+        return
       logger.info(
           f"Using Python version {python_version} from the config file to build the Dockerfile")
     else:
@@ -174,10 +246,26 @@ class ModelUploader:
     )
     python_version = self.DEFAULT_PYTHON_VERSION
 
+    base_image = self.PYTHON_BASE_IMAGE.format(python_version=python_version)
+
+    # Parse the requirements.txt file to determine the base image
+    dependencies = self._parse_requirements()
+    if 'torch' in dependencies and dependencies['torch']:
+      torch_version = dependencies['torch']
+
+      for image in self.AVAILABLE_TORCH_IMAGES:
+        if torch_version in image and f'py{python_version}' in image:
+          base_image = self.TORCH_BASE_IMAGE.format(
+              torch_version=torch_version,
+              python_version=python_version,
+              cuda_version=self.DEFAULT_CUDA_VERSION)
+          logger.info(f"Using Torch version {torch_version} base image to build the Docker image")
+          break
+
     # Replace placeholders with actual values
     dockerfile_content = dockerfile_template.safe_substitute(
-        PYTHON_VERSION=python_version,
         name='main',
+        BASE_IMAGE=base_image,
     )
 
     # Write Dockerfile
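Putting the pieces together: the base image defaults to the plain Python image and is swapped for a matching torch image when `requirements.txt` pins torch, then `string.Template.safe_substitute` injects it into `Dockerfile.template`. A hedged sketch of that flow (the versions are made up; the tag formats are the constants defined earlier):

```python
from string import Template

PYTHON_BASE_IMAGE = 'public.ecr.aws/clarifai-models/python-base:{python_version}'
TORCH_BASE_IMAGE = 'public.ecr.aws/clarifai-models/torch:{torch_version}-py{python_version}-cuda{cuda_version}'

python_version = '3.11'
dependencies = {'torch': '2.4.0'}  # e.g. the result of _parse_requirements()

base_image = PYTHON_BASE_IMAGE.format(python_version=python_version)
if dependencies.get('torch'):
    base_image = TORCH_BASE_IMAGE.format(
        torch_version=dependencies['torch'],
        python_version=python_version,
        cuda_version='124')

# safe_substitute leaves unknown $VARS (like $TARGETPLATFORM) untouched.
line = Template('FROM --platform=$TARGETPLATFORM ${BASE_IMAGE} as build').safe_substitute(
    name='main', BASE_IMAGE=base_image)
print(line)
# FROM --platform=$TARGETPLATFORM public.ecr.aws/clarifai-models/torch:2.4.0-py3.11-cuda124 as build
```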
clarifai/runners/utils/url_fetcher.py
CHANGED
@@ -6,25 +6,32 @@ from clarifai.utils.logging import logger
 
 
 def download_input(input):
+  _download_input_data(input.data)
+  if input.data.parts:
+    for i in range(len(input.data.parts)):
+      _download_input_data(input.data.parts[i].data)
+
+
+def _download_input_data(input_data):
   """
   This function will download any urls that are not already bytes.
   """
-  if
+  if input_data.image.url and not input_data.image.base64:
     # Download the image
-    with fsspec.open(
-
-  if
+    with fsspec.open(input_data.image.url, 'rb') as f:
+      input_data.image.base64 = f.read()
+  if input_data.video.url and not input_data.video.base64:
     # Download the video
-    with fsspec.open(
-
-  if
+    with fsspec.open(input_data.video.url, 'rb') as f:
+      input_data.video.base64 = f.read()
+  if input_data.audio.url and not input_data.audio.base64:
     # Download the audio
-    with fsspec.open(
-
-  if
+    with fsspec.open(input_data.audio.url, 'rb') as f:
+      input_data.audio.base64 = f.read()
+  if input_data.text.url and not input_data.text.raw:
    # Download the text
-    with fsspec.open(
-
+    with fsspec.open(input_data.text.url, 'r') as f:
+      input_data.text.raw = f.read()
 
 
 def ensure_urls_downloaded(request, max_threads=128):
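`download_input` now also walks `input.data.parts`, and each URL field is fetched with `fsspec` only when no bytes are present yet. A standalone sketch of that pattern (the URL is a placeholder and fsspec's HTTP dependencies are assumed to be installed):

```python
import fsspec

url = "https://samples.clarifai.com/metro-north.jpg"
image_bytes = b""

# Only download when the data is not already inlined as bytes.
if url and not image_bytes:
    with fsspec.open(url, "rb") as f:
        image_bytes = f.read()

print(len(image_bytes), "bytes downloaded")
```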
{clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: clarifai
-Version: 10.10.0
+Version: 10.11.0
 Summary: Clarifai Python SDK
 Home-page: https://github.com/Clarifai/clarifai-python
 Author: Clarifai
@@ -20,22 +20,22 @@ Classifier: Operating System :: OS Independent
 Requires-Python: >=3.8
 Description-Content-Type: text/markdown
 License-File: LICENSE
-Requires-Dist: clarifai-grpc
-Requires-Dist: clarifai-protocol
-Requires-Dist: numpy
-Requires-Dist: tqdm
-Requires-Dist: tritonclient
-Requires-Dist: rich
-Requires-Dist: PyYAML
-Requires-Dist: schema
-Requires-Dist: Pillow
-Requires-Dist: inquirerpy
-Requires-Dist: tabulate
-Requires-Dist: protobuf
-Requires-Dist: fsspec
-Requires-Dist: click
+Requires-Dist: clarifai-grpc>=10.10.2
+Requires-Dist: clarifai-protocol>=0.0.6
+Requires-Dist: numpy>=1.22.0
+Requires-Dist: tqdm>=4.65.0
+Requires-Dist: tritonclient>=2.34.0
+Requires-Dist: rich>=13.4.2
+Requires-Dist: PyYAML>=6.0.1
+Requires-Dist: schema==0.7.5
+Requires-Dist: Pillow>=9.5.0
+Requires-Dist: inquirerpy==0.3.4
+Requires-Dist: tabulate>=0.9.0
+Requires-Dist: protobuf==5.27.3
+Requires-Dist: fsspec==2024.6.1
+Requires-Dist: click==8.1.7
 Provides-Extra: all
-Requires-Dist: pycocotools
+Requires-Dist: pycocotools==2.0.6; extra == "all"
 
 <h1 align="center">
 <a href="https://www.clarifai.com/"><img alt="Clarifai" title="Clarifai" src="https://github.com/user-attachments/assets/623b883b-7fe5-4b95-bbfa-8691f5779af4"></a>
{clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/RECORD
CHANGED
@@ -1,4 +1,4 @@
-clarifai/__init__.py,sha256=
+clarifai/__init__.py,sha256=QlkUWf1vZWZO1B4-0PjOW339CFFq5tlWz3teIz9l6-E,24
 clarifai/cli.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 clarifai/errors.py,sha256=RwzTajwds51wLD0MVlMC5kcpBnzRpreDLlazPSBZxrg,2605
 clarifai/versions.py,sha256=jctnczzfGk_S3EnVqb2FjRKfSREkNmvNEwAAa_VoKiQ,222
@@ -7,17 +7,17 @@ clarifai/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 clarifai/cli/base.py,sha256=okuBNlMmLEQw9-0f4yzemCtneNNRTVXUugCwD58-ZtQ,3417
 clarifai/cli/compute_cluster.py,sha256=N2dNQNJEPg9nxsb8x2igEzYuGRzjn7l4kNttjFIxmhI,1827
 clarifai/cli/deployment.py,sha256=sUEuz5-rtozMx8deVcJXLi6lHsP2jc8x3y2MpUAVfqY,2506
-clarifai/cli/model.py,sha256=
+clarifai/cli/model.py,sha256=0DRp3TjySvRwyUnt-v1nqhYOcTnVtEZ0jdXH4TBtuQ8,2183
 clarifai/cli/nodepool.py,sha256=yihxS_rIFoBBKzRlqBX8Ab42iPpBMJrJFsk8saph6ms,3049
 clarifai/client/__init__.py,sha256=xI1U0l5AZdRThvQAXCLsd9axxyFzXXJ22m8LHqVjQRU,662
 clarifai/client/app.py,sha256=6pckYme1urV2YJjLIYfeZ-vH0Z5YSQa51jzIMcEfwug,38342
 clarifai/client/base.py,sha256=hSHOqkXbSKyaRDeylMMnkhUHCAHhEqno4KI0CXGziBA,7536
 clarifai/client/compute_cluster.py,sha256=EvW9TJjPvInUlggfg1A98sxoWH8_PY5rCVXZhsj6ac0,8705
-clarifai/client/dataset.py,sha256=
-clarifai/client/deployment.py,sha256=
-clarifai/client/input.py,sha256=
+clarifai/client/dataset.py,sha256=AIzwbYs-ExkmUqW9nuEJgpW8-D7rjA1PtopU5Iu6YZE,32018
+clarifai/client/deployment.py,sha256=w7Y6pA1rYG4KRK1SwusRZc2sQRXlG8wezuVdzSWpCo0,2586
+clarifai/client/input.py,sha256=GvrPV2chThNjimekBIleuIr6AD10_wrfc-1Hm5C4NQ8,45648
 clarifai/client/lister.py,sha256=03KGMvs5RVyYqxLsSrWhNc34I8kiF1Ph0NeyEwu7nMU,2082
-clarifai/client/model.py,sha256=
+clarifai/client/model.py,sha256=8koRWV_-cLLtZYFHQzNxMFw2X1VXAZ6aJI-1cOp6r4U,84655
 clarifai/client/module.py,sha256=FTkm8s9m-EaTKN7g9MnLhGJ9eETUfKG7aWZ3o1RshYs,4204
 clarifai/client/nodepool.py,sha256=la3vTFrO4LX8zm2eQ5jqf2L0-kQ63Dano8FibadoZbk,10152
 clarifai/client/search.py,sha256=GaPWN6JmTQGZaCHr6U1yv0zqR6wKFl7i9IVLg2ul1CI,14254
@@ -36,12 +36,12 @@ clarifai/constants/search.py,sha256=yYEqTaFg-KdnpJE_Ytp-EPVHIIC395iNtZrpVlLIf4o,
 clarifai/constants/workflow.py,sha256=cECq1xdvf44MCdtK2AbkiuuwhyL-6OWZdQfYbsLKy_o,33
 clarifai/datasets/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 clarifai/datasets/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-clarifai/datasets/export/inputs_annotations.py,sha256=
+clarifai/datasets/export/inputs_annotations.py,sha256=3AtUBrMIjw8H3ehDsJFYcBFoAZ1QKQo1hXTMsHh8f20,10159
 clarifai/datasets/upload/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 clarifai/datasets/upload/base.py,sha256=UIc0ufyIBCrb83_sFpv21L8FshsX4nwsLYQkdlJfzD4,2357
 clarifai/datasets/upload/features.py,sha256=jv2x7jGZKS-LMt87sEZNBwwOskHbP26XTMjoiaSA5pg,2024
 clarifai/datasets/upload/image.py,sha256=HlCsfEMu_C4GVecGSv52RUJ6laLW8H64Pfj_FQyX6qg,8580
-clarifai/datasets/upload/multimodal.py,sha256=
+clarifai/datasets/upload/multimodal.py,sha256=4jBFXgT44tPFHm3O3lYcnKM046qjUNJJaR0oBVTa3HM,2309
 clarifai/datasets/upload/text.py,sha256=boVJenfQZKf79aXu8CEP4g_ANzX5ROdd06g07O7RnXU,2198
 clarifai/datasets/upload/utils.py,sha256=BerWhq40ZUN30z6VImlc93eZtT-1vI18AMgSOuNzJEM,9647
 clarifai/datasets/upload/loaders/README.md,sha256=aNRutSCTzLp2ruIZx74ZkN5AxpzwKOxMa7OzabnKpwg,2980
@@ -62,20 +62,19 @@ clarifai/rag/rag.py,sha256=L10TcV9E0PF1aJ2Nn1z1x6WVoUoGxbKt20lQXg8ksqo,12594
 clarifai/rag/utils.py,sha256=yr1jAcbpws4vFGBqlAwPPE7v1DRba48g8gixLFw8OhQ,4070
 clarifai/runners/__init__.py,sha256=3vr4RVvN1IRy2SxJpyycAAvrUBbH-mXR7pqUmu4w36A,412
 clarifai/runners/server.py,sha256=CVLrv2DjzCvKVXcJ4SWvcFWUZq0bdlBmyEpfVlfgT2A,4902
-clarifai/runners/dockerfile_template/Dockerfile.
-clarifai/runners/dockerfile_template/Dockerfile.cuda.template,sha256=8uQp2sX_bIzgQk84FNlS19PwKH_l0Qi54xE7_NVxUTE,3314
+clarifai/runners/dockerfile_template/Dockerfile.template,sha256=-T38Rscpjot8WVuUTUq1_N0xz_gg653FOHV4XQYGG-U,1453
 clarifai/runners/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-clarifai/runners/models/base_typed_model.py,sha256=
+clarifai/runners/models/base_typed_model.py,sha256=DWEUK5ge9NVZE6LkT3BNTFYjYMPHz-nDgPA48Y0DGXU,7859
 clarifai/runners/models/model_class.py,sha256=9JSPAr4U4K7xI0kSl-q0mHB06zknm2OR-8XIgBCto94,1611
-clarifai/runners/models/model_run_locally.py,sha256=
+clarifai/runners/models/model_run_locally.py,sha256=6LgRzTTDjmGvAV6gEUhW3T3yoCMpLqkOUGa4anZUbig,7144
 clarifai/runners/models/model_runner.py,sha256=3vzoastQxkGRDK8T9aojDsLNBb9A3IiKm6YmbFrE9S0,6241
 clarifai/runners/models/model_servicer.py,sha256=X4715PVA5PBurRTYcwSEudg8fShGV6InAF4mmRlRcHg,2826
-clarifai/runners/models/model_upload.py,sha256=
+clarifai/runners/models/model_upload.py,sha256=N3KCAHibeTJdnlDx1vixYkV1vl6lI6GA8LL7TuvUax8,20430
 clarifai/runners/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 clarifai/runners/utils/data_handler.py,sha256=sxy9zlAgI6ETuxCQhUgEXAn2GCsaW1GxpK6GTaMne0g,6966
 clarifai/runners/utils/data_utils.py,sha256=R1iQ82TuQ9JwxCJk8yEB1Lyb0BYVhVbWJI9YDi1zGOs,318
 clarifai/runners/utils/loader.py,sha256=1oktDUQA1Lpv0NiCXFwoxpp0jqqbvB7sWvpymwyWY2E,4243
-clarifai/runners/utils/url_fetcher.py,sha256
+clarifai/runners/utils/url_fetcher.py,sha256=v_8JOWmkyFAzsBulsieKX7Nfjy1Yg7wGSZeqfEvw2cg,1640
 clarifai/schema/search.py,sha256=JjTi8ammJgZZ2OGl4K6tIA4zEJ1Fr2ASZARXavI1j5c,2448
 clarifai/urls/helper.py,sha256=tjoMGGHuWX68DUB0pk4MEjrmFsClUAQj2jmVEM_Sy78,4751
 clarifai/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -92,9 +91,9 @@ clarifai/workflows/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
 clarifai/workflows/export.py,sha256=vICRhIreqDSShxLKjHNM2JwzKsf1B4fdXB0ciMcA70k,1945
 clarifai/workflows/utils.py,sha256=nGeB_yjVgUO9kOeKTg4OBBaBz-AwXI3m-huSVj-9W18,1924
 clarifai/workflows/validate.py,sha256=yJq03MaJqi5AK3alKGJJBR89xmmjAQ31sVufJUiOqY8,2556
-clarifai-10.
-clarifai-10.
-clarifai-10.
-clarifai-10.
-clarifai-10.
-clarifai-10.
+clarifai-10.11.0.dist-info/LICENSE,sha256=mUqF_d12-qE2n41g7C5_sq-BMLOcj6CNN-jevr15YHU,555
+clarifai-10.11.0.dist-info/METADATA,sha256=C1A5Myx1yQUdZRhpNDzOV-zm6a7OfBVbSireNn1e7mY,19550
+clarifai-10.11.0.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+clarifai-10.11.0.dist-info/entry_points.txt,sha256=X9FZ4Z-i_r2Ud1RpZ9sNIFYuu_-9fogzCMCRUD9hyX0,51
+clarifai-10.11.0.dist-info/top_level.txt,sha256=wUMdCQGjkxaynZ6nZ9FAnvBUCgp5RJUVFSy2j-KYo0s,9
+clarifai-10.11.0.dist-info/RECORD,,
clarifai/runners/dockerfile_template/Dockerfile.cuda.template
DELETED
@@ -1,83 +0,0 @@
-# Build a virtualenv containing necessary system libraries and Python packages
-# for users to install their own packages while also being distroless.
-# * Install python3-venv
-# * Install gcc libpython3-dev to compile C Python modules
-# * In the virtualenv: Update pip setuputils and wheel to support building new packages
-# * Export environment variables to use the virtualenv by default
-# * Create a non-root user with minimal privileges and use it
-ARG TARGET_PLATFORM=linux/amd64
-FROM --platform=$TARGET_PLATFORM public.ecr.aws/docker/library/python:${PYTHON_VERSION}-slim-bookworm as build
-
-ENV DEBIAN_FRONTEND=noninteractive
-RUN apt-get update && \
-  apt-get install --no-install-suggests --no-install-recommends --yes \
-  software-properties-common \
-  gcc \
-  libpython3-dev && \
-  python${PYTHON_VERSION} -m venv /venv && \
-  /venv/bin/pip install --disable-pip-version-check --upgrade pip setuptools wheel && \
-  apt-get clean && rm -rf /var/lib/apt/lists/*
-
-# Set environment variables to use virtualenv by default
-ENV VIRTUAL_ENV=/venv
-ENV PATH="$VIRTUAL_ENV/bin:$PATH"
-
-#############################
-# User specific requirements
-#############################
-COPY requirements.txt .
-
-# Install requirements and cleanup before leaving this line.
-# Note(zeiler): this could be in a future template as {{model_python_deps}}
-RUN python -m pip install -r requirements.txt && rm -rf /root/.cache
-
-# Install Clarifai SDK
-RUN python -m pip install clarifai
-
-#############################
-# Finally copy everything we built into a distroless image for runtime.
-#############################
-ARG TARGET_PLATFORM=linux/amd64
-FROM --platform=$TARGET_PLATFORM gcr.io/distroless/python3-debian12:latest
-# FROM --platform=$TARGET_PLATFORM gcr.io/distroless/python3-debian12:debug
-ARG PYTHON_VERSION=${PYTHON_VERSION}
-# needed to call pip directly
-COPY --from=build /bin/sh /bin/sh
-
-# virtual env
-COPY --from=build /venv /venv
-
-# We have to overwrite the python3 binary that the distroless image uses
-COPY --from=build /usr/local/bin/python${PYTHON_VERSION} /usr/bin/python3
-# And also copy in all the lib files for it.
-COPY --from=build /usr/local/lib/ /usr/lib/
-
-# Set environment variables to use virtualenv by default
-ENV VIRTUAL_ENV=/venv
-ENV PYTHONPATH=${PYTHONPATH}:${VIRTUAL_ENV}/lib/python${PYTHON_VERSION}/site-packages
-
-# These will be set by the templaing system.
-ENV CLARIFAI_PAT=${CLARIFAI_PAT}
-ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
-ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
-ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
-ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
-ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
-
-# Set the NUMBA cache dir to /tmp
-ENV NUMBA_CACHE_DIR=/tmp/numba_cache
-ENV HOME=/tmp
-
-# Set the working directory to /app
-WORKDIR /app
-
-# Copy the current folder into /app/model_dir that the SDK will expect.
-# Note(zeiler): would be nice to exclude checkpoints in case they were pre-downloaded.
-COPY . /app/model_dir/${name}
-
-# Add the model directory to the python path.
-ENV PYTHONPATH=${PYTHONPATH}:/app/model_dir/${name}
-
-# Finally run the clarifai entrypoint to start the runner loop and local dev server.
-# Note(zeiler): we may want to make this a clarifai CLI call.
-CMD ["-m", "clarifai.runners.server", "--model_path", "/app/model_dir/${name}"]
{clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/LICENSE
File without changes
{clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/entry_points.txt
File without changes
{clarifai-10.10.0.dist-info → clarifai-10.11.0.dist-info}/top_level.txt
File without changes