clarifai 11.2.4rc2-py3-none-any.whl → 11.3.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- clarifai/__init__.py +1 -1
- clarifai/client/deployment.py +3 -1
- clarifai/client/model.py +7 -3
- clarifai/runners/models/model_builder.py +0 -55
- clarifai/runners/models/model_class.py +4 -7
- clarifai/runners/utils/data_types.py +3 -3
- clarifai/runners/utils/data_utils.py +35 -36
- clarifai/runners/utils/method_signatures.py +0 -8
- clarifai/runners/utils/openai_convertor.py +126 -186
- clarifai/utils/protobuf.py +143 -0
- {clarifai-11.2.4rc2.dist-info → clarifai-11.3.0.dist-info}/METADATA +4 -3
- clarifai-11.3.0.dist-info/RECORD +107 -0
- {clarifai-11.2.4rc2.dist-info → clarifai-11.3.0.dist-info}/WHEEL +1 -1
- clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/__pycache__/errors.cpython-310.pyc +0 -0
- clarifai/__pycache__/versions.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/app.cpython-39.pyc +0 -0
- clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/compute_cluster.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/deployment.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/nodepool.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
- clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
- clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
- clarifai/client/cli/__init__.py +0 -0
- clarifai/client/cli/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/base_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/__pycache__/model_cli.cpython-310.pyc +0 -0
- clarifai/client/cli/base_cli.py +0 -88
- clarifai/client/cli/model_cli.py +0 -29
- clarifai/constants/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/input.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/constants/__pycache__/workflow.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/export/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/multimodal.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
- clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/datasets/upload/loaders/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/modules/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
- clarifai/rag/__pycache__/rag.cpython-39.pyc +0 -0
- clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/dockerfile_template/Dockerfile.cpu.template +0 -31
- clarifai/runners/dockerfile_template/Dockerfile.cuda.template +0 -42
- clarifai/runners/dockerfile_template/Dockerfile.nim +0 -71
- clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/base_typed_model.cpython-39.pyc +0 -0
- clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310-pytest-7.1.2.pyc +0 -0
- clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
- clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
- clarifai/runners/models/base_typed_model.py +0 -238
- clarifai/runners/models/model_class_refract.py +0 -80
- clarifai/runners/models/model_upload.py +0 -607
- clarifai/runners/models/temp.py +0 -25
- clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/buffered_stream.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/constants.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_handler.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/data_utils.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/grpc_server.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/health.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/logging.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/stream_source.cpython-39.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-38.pyc +0 -0
- clarifai/runners/utils/__pycache__/url_fetcher.cpython-39.pyc +0 -0
- clarifai/runners/utils/data_handler.py +0 -292
- clarifai/runners/utils/data_handler_refract.py +0 -213
- clarifai/runners/utils/logger.py +0 -0
- clarifai/runners/utils/openai_format.py +0 -87
- clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
- clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/__pycache__/cli.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/constants.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
- clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/utils/evaluation/__pycache__/main.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/__init__.cpython-39.pyc +0 -0
- clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
- clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
- clarifai-11.2.4rc2.dist-info/RECORD +0 -241
- {clarifai-11.2.4rc2.dist-info → clarifai-11.3.0.dist-info}/entry_points.txt +0 -0
- {clarifai-11.2.4rc2.dist-info → clarifai-11.3.0.dist-info/licenses}/LICENSE +0 -0
- {clarifai-11.2.4rc2.dist-info → clarifai-11.3.0.dist-info}/top_level.txt +0 -0
clarifai/runners/dockerfile_template/Dockerfile.cuda.template (deleted, 42 lines):

@@ -1,42 +0,0 @@
-ARG TARGET_PLATFORM=linux/amd64
-FROM --platform=$TARGET_PLATFORM ${BASE_IMAGE} as build
-
-ENV DEBIAN_FRONTEND=noninteractive
-
-#############################
-# User specific requirements
-#############################
-COPY requirements.txt .
-
-# Install requirements and cleanup before leaving this line.
-# Note(zeiler): this could be in a future template as {{model_python_deps}}
-RUN pip install --no-cache-dir -r requirements.txt
-
-# Install Clarifai SDK
-RUN pip install --no-cache-dir clarifai
-
-# These will be set by the templaing system.
-ENV CLARIFAI_PAT=${CLARIFAI_PAT}
-ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
-ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
-ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
-ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
-ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
-
-# Set the NUMBA cache dir to /tmp
-ENV NUMBA_CACHE_DIR=/tmp/numba_cache
-ENV HOME=/tmp
-
-# Set the working directory to /app
-WORKDIR /app
-
-# Copy the current folder into /app/model_dir that the SDK will expect.
-# Note(zeiler): would be nice to exclude checkpoints in case they were pre-downloaded.
-COPY . /app/model_dir/${name}
-
-# Add the model directory to the python path.
-ENV PYTHONPATH=${PYTHONPATH}:/app/model_dir/${name}
-
-# Finally run the clarifai entrypoint to start the runner loop and local dev server.
-# Note(zeiler): we may want to make this a clarifai CLI call.
-CMD ["-m", "clarifai.runners.server", "--model_path", "/app/model_dir/${name}"]
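The deleted templates are built around substitution placeholders such as ${BASE_IMAGE} and ${name} that are filled in before the image is built. As a minimal sketch of that rendering step using only the Python standard library (the SDK's actual templating code is not part of this diff, so render_dockerfile and the BASE_IMAGE value below are hypothetical):

from pathlib import Path
from string import Template

def render_dockerfile(template_path: str, out_path: str, **variables) -> None:
    # Hypothetical helper: fill ${...} placeholders in a Dockerfile template.
    text = Path(template_path).read_text()
    # safe_substitute leaves unknown ${...} tokens (e.g. runtime ENV references) intact.
    Path(out_path).write_text(Template(text).safe_substitute(**variables))

render_dockerfile(
    "Dockerfile.cuda.template",
    "Dockerfile",
    BASE_IMAGE="nvidia/cuda:12.4.1-runtime-ubuntu22.04",  # assumed base image
    name="my-model",
)

Using safe_substitute rather than substitute matters here because the template intentionally leaves tokens like ${CLARIFAI_PAT} for Docker to resolve at build or run time.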
clarifai/runners/dockerfile_template/Dockerfile.nim (deleted, 71 lines):

@@ -1,71 +0,0 @@
-FROM nvcr.io/nim/meta/llama-3.1-8b-instruct:1.1.2 as build
-
-FROM gcr.io/distroless/python3-debian12:debug
-
-
-COPY --from=build /bin/bash /bin/rbash
-COPY --from=build /bin/sh /bin/sh
-COPY --from=build /bin/rsh /bin/rsh
-
-# we have to overwrite the python3 binary that the distroless image uses
-COPY --from=build /opt/nim/llm/.venv/bin/python3.10 /usr/bin/python3
-COPY --from=build /opt/nim/llm/.venv/bin/python3.10 /usr/local/bin/python3.10
-
-# also copy in all the lib files for it.
-COPY --from=build /lib /lib
-COPY --from=build /lib64 /lib64
-COPY --from=build /usr/lib/ /usr/lib/
-COPY --from=build /usr/local/lib/ /usr/local/lib/
-# ldconfig is needed to update the shared library cache so system libraries (like CUDA) can be found
-COPY --from=build /usr/sbin/ldconfig /sbin/ldconfig
-COPY --from=build /usr/sbin/ldconfig.real /sbin/ldconfig.real
-COPY --from=build /etc/ld.so.conf /etc/ld.so.conf
-COPY --from=build /etc/ld.so.cache /etc/ld.so.cache
-COPY --from=build /etc/ld.so.conf.d/ /etc/ld.so.conf.d/
-
-# COPY NIM files
-COPY --from=build /opt /opt
-COPY --from=build /etc/nim /etc/nim
-
-# Set environment variables to use the nim libraries and python
-ENV PYTHONPATH=${PYTHONPATH}:/opt/nim/llm/.venv/lib/python3.10/site-packages:/opt/nim/llm
-ENV PATH="/opt/nim/llm/.venv/bin:/opt/hpcx/ucc/bin:/opt/hpcx/ucx/bin:/opt/hpcx/ompi/bin:$PATH"
-
-ENV LD_LIBRARY_PATH="/opt/hpcx/ucc/lib/ucc:/opt/hpcx/ucc/lib:/opt/hpcx/ucx/lib/ucx:/opt/hpcx/ucx/lib:/opt/hpcx/ompi/lib:/opt/hpcx/ompi/lib/openmpi:/opt/nim/llm/.venv/lib/python3.10/site-packages/tensorrt_llm/libs:/opt/nim/llm/.venv/lib/python3.10/site-packages/nvidia/cublas/lib:/opt/nim/llm/.venv/lib/python3.10/site-packages/tensorrt_libs:/opt/nim/llm/.venv/lib/python3.10/site-packages/nvidia/nccl/lib:$LD_LIBRARY_PATH"
-
-ENV LIBRARY_PATH=/opt/hpcx/ucc/lib:/opt/hpcx/ucx/lib:/opt/hpcx/ompi/lib:$LIBRARY_PATH
-
-ENV CPATH=/opt/hpcx/ompi/include:/opt/hpcx/ucc/include:/opt/hpcx/ucx/include:$CPATH
-ENV LLM_PROJECT_DIR=/opt/nim/llm
-
-# Set environment variables for MPI
-ENV OMPI_HOME=/opt/hpcx/ompi
-ENV HPCX_MPI_DIR=/opt/hpcx/ompi
-ENV MPIf_HOME=/opt/hpcx/ompi
-ENV OPAL_PREFIX=/opt/hpcx/ompi
-
-# Set environment variables for UCC
-ENV UCC_DIR=/opt/hpcx/ucc/lib/cmake/ucc
-ENV UCC_HOME=/opt/hpcx/ucc
-ENV HPCX_UCC_DIR=/opt/hpcx/ucc
-ENV USE_UCC=1
-ENV USE_SYSTEM_UCC=1
-
-# Set environment variables for HPC-X
-ENV HPCX_DIR=/opt/hpcx
-ENV HPCX_UCX_DIR=/opt/hpcx/ucx
-ENV HPCX_MPI_DIR=/opt/hpcx/ompi
-
-# Set environment variables for UCX
-ENV UCX_DIR=/opt/hpcx/ucx/lib/cmake/ucx
-ENV UCX_HOME=/opt/hpcx/ucx
-
-ENV HOME=/opt/nim/llm
-
-# ln is needed to create symbolic links (needed by nvidia-container-runtime)
-COPY --from=build /usr/bin/ln /usr/bin/ln
-
-# Run ldconfig in the build stage to update the library cache else CUDA libraries won't be found
-RUN ldconfig -v
-
-SHELL ["/bin/rbash", "-c"]
Binary files (9 entries; contents not shown)
clarifai/runners/models/base_typed_model.py (deleted, 238 lines):

@@ -1,238 +0,0 @@
-import itertools
-from typing import Any, Dict, Iterator, List, Tuple
-
-import numpy as np
-from clarifai_grpc.grpc.api import resources_pb2, service_pb2
-from clarifai_grpc.grpc.api.service_pb2 import PostModelOutputsRequest
-from google.protobuf import json_format
-
-from ..utils.data_handler import InputDataHandler, OutputDataHandler
-from .model_class import ModelClass
-
-
-class AnyAnyModel(ModelClass):
-
-  def load_model(self):
-    """
-    Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-    in this method so they are loaded only once for faster inference.
-    """
-    raise NotImplementedError
-
-  def parse_input_request(
-      self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
-    list_input_dict = [
-        InputDataHandler.from_proto(input).to_python() for input in input_request.inputs
-    ]
-    inference_params = json_format.MessageToDict(
-        input_request.model.model_version.output_info.params)
-
-    return list_input_dict, inference_params
-
-  def convert_output_to_proto(self, outputs: list):
-    assert (isinstance(outputs, Iterator) or isinstance(outputs, list) or
-            isinstance(outputs, tuple)), "outputs must be an Iterator"
-    output_protos = []
-    for output in outputs:
-      if isinstance(output, OutputDataHandler):
-        output = output.proto
-      elif isinstance(output, resources_pb2.Output):
-        pass
-      else:
-        raise NotImplementedError
-      output_protos.append(output)
-
-    return service_pb2.MultiOutputResponse(outputs=output_protos)
-
-  def predict_wrapper(
-      self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
-    list_dict_input, inference_params = self.parse_input_request(request)
-    outputs = self.predict(list_dict_input, inference_parameters=inference_params)
-    return self.convert_output_to_proto(outputs)
-
-  def generate_wrapper(
-      self, request: PostModelOutputsRequest) -> Iterator[service_pb2.MultiOutputResponse]:
-    list_dict_input, inference_params = self.parse_input_request(request)
-    outputs = self.generate(list_dict_input, inference_parameters=inference_params)
-    for output in outputs:
-      yield self.convert_output_to_proto(output)
-
-  def _preprocess_stream(
-      self, request: Iterator[PostModelOutputsRequest]) -> Iterator[Tuple[List[Dict], List[Dict]]]:
-    """Return generator of processed data (from proto to python) and inference parameters like predict and generate"""
-    for i, req in enumerate(request):
-      input_data, _ = self.parse_input_request(req)
-      yield input_data
-
-  def stream_wrapper(self, request: Iterator[PostModelOutputsRequest]
-                    ) -> Iterator[service_pb2.MultiOutputResponse]:
-    first_request = next(request)
-    _, inference_params = self.parse_input_request(first_request)
-    request_iterator = itertools.chain([first_request], request)
-    outputs = self.stream(self._preprocess_stream(request_iterator), inference_params)
-    for output in outputs:
-      yield self.convert_output_to_proto(output)
-
-  def predict(self, input_data: List[Dict],
-              inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
-    """
-    Prediction method.
-
-    Args:
-    -----
-    - input_data: is list of dict where key is input type name.
-      * image: np.ndarray
-      * text: str
-      * audio: bytes
-
-    - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-    Returns:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
-
-  def generate(self, input_data: List[Dict],
-               inference_parameters: Dict[str, Any] = {}) -> Iterator[List[OutputDataHandler]]:
-    """
-    Generate method.
-
-    Args:
-    -----
-    - input_data: is list of dict where key is input type name.
-      * image: np.ndarray
-      * text: str
-      * audio: bytes
-
-    - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-    Yield:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
-
-  def stream(self, inputs: Iterator[List[Dict[str, Any]]],
-             inference_params: Dict[str, Any]) -> Iterator[List[OutputDataHandler]]:
-    """
-    Stream method.
-
-    Args:
-    -----
-    input_request: is an Iterator of Tuple which
-    - First element (List[Dict[str, Union[np.ndarray, str, bytes]]]) is list of dict input data type which keys and values are:
-      * image: np.ndarray
-      * text: str
-      * audio: bytes
-
-    - Second element (Dict[str, Union[bool, str, float, int]]): is a dict of inference_parameters
-
-    Yield:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
-
-
-class VisualInputModel(AnyAnyModel):
-
-  def parse_input_request(
-      self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
-    list_input_dict = [
-        InputDataHandler.from_proto(input).image(format="np") for input in input_request.inputs
-    ]
-    inference_params = json_format.MessageToDict(
-        input_request.model.model_version.output_info.params)
-
-    return list_input_dict, inference_params
-
-  def load_model(self):
-    """
-    Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-    in this method so they are loaded only once for faster inference.
-    """
-    raise NotImplementedError
-
-  def predict(self, input_data: List[np.ndarray],
-              inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
-    """
-    Prediction method.
-
-    Args:
-    -----
-    - input_data(List[np.ndarray]): is list of image as np.ndarray type
-    - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-    Returns:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
-
-
-class TextInputModel(AnyAnyModel):
-
-  def load_model(self):
-    """
-    Load inference time artifacts that are called frequently .e.g. models, tokenizers, etc.
-    in this method so they are loaded only once for faster inference.
-    """
-    raise NotImplementedError
-
-  def parse_input_request(
-      self, input_request: service_pb2.PostModelOutputsRequest) -> Tuple[List[Dict], Dict]:
-    list_input_text = [InputDataHandler.from_proto(input).text for input in input_request.inputs]
-    inference_params = json_format.MessageToDict(
-        input_request.model.model_version.output_info.params)
-
-    return list_input_text, inference_params
-
-  def predict(self, input_data: List[str],
-              inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
-    """
-    Prediction method.
-
-    Args:
-    -----
-    - input_data(List[str]): is list of text as str type
-    - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-    Returns:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
-
-  def generate(self, input_data: List[str],
-               inference_parameters: Dict[str, Any] = {}) -> Iterator[List[OutputDataHandler]]:
-    """
-    Prediction method.
-
-    Args:
-    -----
-    - input_data(List[str]): is list of text as str type
-    - inference_parameters (Dict[str, Union[bool, str, float, int]]): your inference parameters.
-
-    Yield:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
-
-  def stream(self, inputs: Iterator[List[str]],
-             inference_params: Dict[str, Any]) -> Iterator[List[OutputDataHandler]]:
-    """
-    Stream method.
-
-    Args:
-    -----
-    input_request: is an Iterator of Tuple which
-    - First element (List[str]) is list of input text:
-    - Second element (Dict[str, Union[bool, str, float, int]]): is a dict of inference_parameters
-
-    Yield:
-    --------
-      List of OutputDataHandler
-    """
-    raise NotImplementedError
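To make the removed interface concrete: a subclass of the deleted TextInputModel only had to fill in load_model and predict. The sketch below follows the signatures shown above; the stand-in model and the way OutputDataHandler instances are populated are assumptions, since data_handler.py is also removed in this release:

from typing import Any, Dict, List

# Both modules below exist only in pre-11.3.0 releases; they are deleted in this diff.
from clarifai.runners.models.base_typed_model import TextInputModel
from clarifai.runners.utils.data_handler import OutputDataHandler


class UppercaseModel(TextInputModel):

    def load_model(self):
        # Load heavy artifacts (checkpoints, tokenizers) once; a toy callable stands in here.
        self.model = str.upper

    def predict(self, input_data: List[str],
                inference_parameters: Dict[str, Any] = {}) -> List[OutputDataHandler]:
        outputs = []
        for text in input_data:
            output = OutputDataHandler()    # assumed no-arg constructor; not shown in this diff
            output.text = self.model(text)  # assumed attribute-style setter
            outputs.append(output)
        return outputs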
clarifai/runners/models/model_class_refract.py (deleted, 80 lines):

@@ -1,80 +0,0 @@
-import inspect
-from abc import ABC, abstractmethod
-from concurrent.futures import ThreadPoolExecutor
-from typing import Any, Dict, Iterator, List, get_type_hints
-
-from clarifai_grpc.grpc.api import resources_pb2, service_pb2
-
-from clarifai.runners.utils.data_handler import Output, kwargs_to_proto, proto_to_kwargs
-
-
-class ModelClass(ABC):
-
-  @abstractmethod
-  def load_model(self):
-    raise NotImplementedError("load_model() not implemented")
-
-  @abstractmethod
-  def predict(self, *args, **kwargs) -> Output:
-    raise NotImplementedError("predict() not implemented")
-
-  @abstractmethod
-  def generate(self, *args, **kwargs) -> Iterator[Output]:
-    raise NotImplementedError("generate() not implemented")
-
-  @abstractmethod
-  def stream(self, *args, **kwargs) -> Iterator[Output]:
-    raise NotImplementedError("stream() not implemented")
-
-  def batch_predict(self, inputs: List[Dict[str, Any]]) -> List[Output]:
-    with ThreadPoolExecutor() as executor:
-      return list(executor.map(lambda x: self.predict(**x), inputs))
-
-  def _process_request(self, request, process_func, is_stream=False):
-    inputs = self._convert_proto_to_python(request.inputs)
-    if len(inputs) == 1:
-      result = process_func(**inputs[0])
-      if is_stream:
-        return (self._convert_output_to_proto(output) for output in result)
-      else:
-        return [self._convert_output_to_proto(result)]
-    else:
-      results = self.batch_predict(inputs) if not is_stream else []
-      return [self._convert_output_to_proto(output) for output in results]
-
-  def predict_wrapper(
-      self, request: service_pb2.PostModelOutputsRequest) -> service_pb2.MultiOutputResponse:
-    outputs = self._process_request(request, self.predict)
-    return service_pb2.MultiOutputResponse(outputs=outputs)
-
-  def generate_wrapper(self, request: service_pb2.PostModelOutputsRequest
-                      ) -> Iterator[service_pb2.MultiOutputResponse]:
-    outputs = self._process_request(request, self.generate, is_stream=True)
-    for output in outputs:
-      yield service_pb2.MultiOutputResponse(outputs=[output])
-
-  def stream_wrapper(self, requests: Iterator[service_pb2.PostModelOutputsRequest]
-                    ) -> Iterator[service_pb2.MultiOutputResponse]:
-    for request in requests:
-      outputs = self._process_request(request, self.stream, is_stream=True)
-      yield service_pb2.MultiOutputResponse(outputs=outputs)
-
-  def _convert_proto_to_python(self, inputs: List[resources_pb2.Input]) -> List[Dict[str, Any]]:
-    get_type_hints(self.predict)
-    required_params = [
-        name for name, param in inspect.signature(self.predict).parameters.items()
-        if param.default == inspect.Parameter.empty
-    ]
-    kwargs_list = []
-    for input_proto in inputs:
-      kwargs = proto_to_kwargs(input_proto.data)
-      missing = [name for name in required_params if name not in kwargs]
-      if missing:
-        raise ValueError(f"Missing required parameters: {missing}")
-      kwargs_list.append(kwargs)
-    return kwargs_list
-
-  def _convert_output_to_proto(self, output: Any) -> resources_pb2.Output:
-    if isinstance(output, Output):
-      return output.to_proto()
-    return kwargs_to_proto(**output).outputs.add()