clarifai 11.0.4__py3-none-any.whl → 11.0.6rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (154) hide show
  1. clarifai/__init__.py +1 -1
  2. clarifai/__pycache__/__init__.cpython-310.pyc +0 -0
  3. clarifai/__pycache__/errors.cpython-310.pyc +0 -0
  4. clarifai/__pycache__/versions.cpython-310.pyc +0 -0
  5. clarifai/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  6. clarifai/cli/__pycache__/base.cpython-310.pyc +0 -0
  7. clarifai/cli/__pycache__/compute_cluster.cpython-310.pyc +0 -0
  8. clarifai/cli/__pycache__/deployment.cpython-310.pyc +0 -0
  9. clarifai/cli/__pycache__/model.cpython-310.pyc +0 -0
  10. clarifai/cli/__pycache__/nodepool.cpython-310.pyc +0 -0
  11. clarifai/client/__pycache__/__init__.cpython-310.pyc +0 -0
  12. clarifai/client/__pycache__/app.cpython-310.pyc +0 -0
  13. clarifai/client/__pycache__/base.cpython-310.pyc +0 -0
  14. clarifai/client/__pycache__/dataset.cpython-310.pyc +0 -0
  15. clarifai/client/__pycache__/input.cpython-310.pyc +0 -0
  16. clarifai/client/__pycache__/lister.cpython-310.pyc +0 -0
  17. clarifai/client/__pycache__/model.cpython-310.pyc +0 -0
  18. clarifai/client/__pycache__/module.cpython-310.pyc +0 -0
  19. clarifai/client/__pycache__/runner.cpython-310.pyc +0 -0
  20. clarifai/client/__pycache__/search.cpython-310.pyc +0 -0
  21. clarifai/client/__pycache__/user.cpython-310.pyc +0 -0
  22. clarifai/client/__pycache__/workflow.cpython-310.pyc +0 -0
  23. clarifai/client/auth/__pycache__/__init__.cpython-310.pyc +0 -0
  24. clarifai/client/auth/__pycache__/helper.cpython-310.pyc +0 -0
  25. clarifai/client/auth/__pycache__/register.cpython-310.pyc +0 -0
  26. clarifai/client/auth/__pycache__/stub.cpython-310.pyc +0 -0
  27. clarifai/constants/__pycache__/dataset.cpython-310.pyc +0 -0
  28. clarifai/constants/__pycache__/model.cpython-310.pyc +0 -0
  29. clarifai/constants/__pycache__/search.cpython-310.pyc +0 -0
  30. clarifai/datasets/__pycache__/__init__.cpython-310.pyc +0 -0
  31. clarifai/datasets/export/__pycache__/__init__.cpython-310.pyc +0 -0
  32. clarifai/datasets/export/__pycache__/inputs_annotations.cpython-310.pyc +0 -0
  33. clarifai/datasets/upload/__pycache__/__init__.cpython-310.pyc +0 -0
  34. clarifai/datasets/upload/__pycache__/base.cpython-310.pyc +0 -0
  35. clarifai/datasets/upload/__pycache__/features.cpython-310.pyc +0 -0
  36. clarifai/datasets/upload/__pycache__/image.cpython-310.pyc +0 -0
  37. clarifai/datasets/upload/__pycache__/text.cpython-310.pyc +0 -0
  38. clarifai/datasets/upload/__pycache__/utils.cpython-310.pyc +0 -0
  39. clarifai/models/__pycache__/__init__.cpython-310.pyc +0 -0
  40. clarifai/models/model_serving/README.md +158 -0
  41. clarifai/models/model_serving/__init__.py +14 -0
  42. clarifai/models/model_serving/__pycache__/__init__.cpython-310.pyc +0 -0
  43. clarifai/models/model_serving/__pycache__/constants.cpython-310.pyc +0 -0
  44. clarifai/models/model_serving/cli/__init__.py +12 -0
  45. clarifai/models/model_serving/cli/__pycache__/__init__.cpython-310.pyc +0 -0
  46. clarifai/models/model_serving/cli/__pycache__/_utils.cpython-310.pyc +0 -0
  47. clarifai/models/model_serving/cli/__pycache__/base.cpython-310.pyc +0 -0
  48. clarifai/models/model_serving/cli/__pycache__/build.cpython-310.pyc +0 -0
  49. clarifai/models/model_serving/cli/__pycache__/create.cpython-310.pyc +0 -0
  50. clarifai/models/model_serving/cli/_utils.py +53 -0
  51. clarifai/models/model_serving/cli/base.py +14 -0
  52. clarifai/models/model_serving/cli/build.py +79 -0
  53. clarifai/models/model_serving/cli/clarifai_clis.py +33 -0
  54. clarifai/models/model_serving/cli/create.py +171 -0
  55. clarifai/models/model_serving/cli/example_cli.py +34 -0
  56. clarifai/models/model_serving/cli/login.py +26 -0
  57. clarifai/models/model_serving/cli/upload.py +183 -0
  58. clarifai/models/model_serving/constants.py +21 -0
  59. clarifai/models/model_serving/docs/cli.md +161 -0
  60. clarifai/models/model_serving/docs/concepts.md +229 -0
  61. clarifai/models/model_serving/docs/dependencies.md +11 -0
  62. clarifai/models/model_serving/docs/inference_parameters.md +139 -0
  63. clarifai/models/model_serving/docs/model_types.md +19 -0
  64. clarifai/models/model_serving/model_config/__init__.py +16 -0
  65. clarifai/models/model_serving/model_config/__pycache__/__init__.cpython-310.pyc +0 -0
  66. clarifai/models/model_serving/model_config/__pycache__/base.cpython-310.pyc +0 -0
  67. clarifai/models/model_serving/model_config/__pycache__/config.cpython-310.pyc +0 -0
  68. clarifai/models/model_serving/model_config/__pycache__/inference_parameter.cpython-310.pyc +0 -0
  69. clarifai/models/model_serving/model_config/__pycache__/output.cpython-310.pyc +0 -0
  70. clarifai/models/model_serving/model_config/base.py +369 -0
  71. clarifai/models/model_serving/model_config/config.py +312 -0
  72. clarifai/models/model_serving/model_config/inference_parameter.py +129 -0
  73. clarifai/models/model_serving/model_config/model_types_config/multimodal-embedder.yaml +25 -0
  74. clarifai/models/model_serving/model_config/model_types_config/text-classifier.yaml +19 -0
  75. clarifai/models/model_serving/model_config/model_types_config/text-embedder.yaml +20 -0
  76. clarifai/models/model_serving/model_config/model_types_config/text-to-image.yaml +19 -0
  77. clarifai/models/model_serving/model_config/model_types_config/text-to-text.yaml +19 -0
  78. clarifai/models/model_serving/model_config/model_types_config/visual-classifier.yaml +22 -0
  79. clarifai/models/model_serving/model_config/model_types_config/visual-detector.yaml +32 -0
  80. clarifai/models/model_serving/model_config/model_types_config/visual-embedder.yaml +19 -0
  81. clarifai/models/model_serving/model_config/model_types_config/visual-segmenter.yaml +19 -0
  82. clarifai/models/model_serving/model_config/output.py +133 -0
  83. clarifai/models/model_serving/model_config/triton/__init__.py +14 -0
  84. clarifai/models/model_serving/model_config/triton/__pycache__/__init__.cpython-310.pyc +0 -0
  85. clarifai/models/model_serving/model_config/triton/__pycache__/serializer.cpython-310.pyc +0 -0
  86. clarifai/models/model_serving/model_config/triton/__pycache__/triton_config.cpython-310.pyc +0 -0
  87. clarifai/models/model_serving/model_config/triton/__pycache__/wrappers.cpython-310.pyc +0 -0
  88. clarifai/models/model_serving/model_config/triton/serializer.py +136 -0
  89. clarifai/models/model_serving/model_config/triton/triton_config.py +182 -0
  90. clarifai/models/model_serving/model_config/triton/wrappers.py +281 -0
  91. clarifai/models/model_serving/repo_build/__init__.py +14 -0
  92. clarifai/models/model_serving/repo_build/__pycache__/__init__.cpython-310.pyc +0 -0
  93. clarifai/models/model_serving/repo_build/__pycache__/build.cpython-310.pyc +0 -0
  94. clarifai/models/model_serving/repo_build/build.py +198 -0
  95. clarifai/models/model_serving/repo_build/static_files/__pycache__/base_test.cpython-310-pytest-7.2.0.pyc +0 -0
  96. clarifai/models/model_serving/repo_build/static_files/_requirements.txt +2 -0
  97. clarifai/models/model_serving/repo_build/static_files/base_test.py +169 -0
  98. clarifai/models/model_serving/repo_build/static_files/inference.py +26 -0
  99. clarifai/models/model_serving/repo_build/static_files/sample_clarifai_config.yaml +25 -0
  100. clarifai/models/model_serving/repo_build/static_files/test.py +40 -0
  101. clarifai/models/model_serving/repo_build/static_files/triton/model.py +75 -0
  102. clarifai/models/model_serving/utils.py +31 -0
  103. clarifai/rag/__pycache__/__init__.cpython-310.pyc +0 -0
  104. clarifai/rag/__pycache__/rag.cpython-310.pyc +0 -0
  105. clarifai/rag/__pycache__/utils.cpython-310.pyc +0 -0
  106. clarifai/runners/__pycache__/__init__.cpython-310.pyc +0 -0
  107. clarifai/runners/__pycache__/server.cpython-310.pyc +0 -0
  108. clarifai/runners/deepgram_live_transcribe.py +98 -0
  109. clarifai/runners/deepgram_live_transcribe.py~ +98 -0
  110. clarifai/runners/deepgram_runner.py +131 -0
  111. clarifai/runners/deepgram_runner.py~ +130 -0
  112. clarifai/runners/dockerfile_template/Dockerfile.cpu.template +31 -0
  113. clarifai/runners/dockerfile_template/Dockerfile.cuda.template +79 -0
  114. clarifai/runners/dockerfile_template/Dockerfile.template +54 -29
  115. clarifai/runners/example_llama2.py~ +72 -0
  116. clarifai/runners/matt_example.py +89 -0
  117. clarifai/runners/matt_example.py~ +87 -0
  118. clarifai/runners/matt_llm_example.py +129 -0
  119. clarifai/runners/matt_llm_example.py~ +128 -0
  120. clarifai/runners/models/__pycache__/__init__.cpython-310.pyc +0 -0
  121. clarifai/runners/models/__pycache__/base_typed_model.cpython-310.pyc +0 -0
  122. clarifai/runners/models/__pycache__/model_class.cpython-310.pyc +0 -0
  123. clarifai/runners/models/__pycache__/model_run_locally.cpython-310.pyc +0 -0
  124. clarifai/runners/models/__pycache__/model_runner.cpython-310.pyc +0 -0
  125. clarifai/runners/models/__pycache__/model_servicer.cpython-310.pyc +0 -0
  126. clarifai/runners/models/__pycache__/model_upload.cpython-310.pyc +0 -0
  127. clarifai/runners/models/model_upload.py +9 -4
  128. clarifai/runners/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  129. clarifai/runners/utils/__pycache__/const.cpython-310.pyc +0 -0
  130. clarifai/runners/utils/__pycache__/data_handler.cpython-310.pyc +0 -0
  131. clarifai/runners/utils/__pycache__/data_utils.cpython-310.pyc +0 -0
  132. clarifai/runners/utils/__pycache__/loader.cpython-310.pyc +0 -0
  133. clarifai/runners/utils/__pycache__/logging.cpython-310.pyc +0 -0
  134. clarifai/runners/utils/__pycache__/url_fetcher.cpython-310.pyc +0 -0
  135. clarifai/runners/utils/const.py +7 -6
  136. clarifai/runners/utils/logging.py +6 -0
  137. clarifai/schema/__pycache__/search.cpython-310.pyc +0 -0
  138. clarifai/urls/__pycache__/helper.cpython-310.pyc +0 -0
  139. clarifai/utils/__pycache__/__init__.cpython-310.pyc +0 -0
  140. clarifai/utils/__pycache__/logging.cpython-310.pyc +0 -0
  141. clarifai/utils/__pycache__/misc.cpython-310.pyc +0 -0
  142. clarifai/utils/__pycache__/model_train.cpython-310.pyc +0 -0
  143. clarifai/utils/logging.py +1 -1
  144. clarifai/workflows/__pycache__/__init__.cpython-310.pyc +0 -0
  145. clarifai/workflows/__pycache__/export.cpython-310.pyc +0 -0
  146. clarifai/workflows/__pycache__/utils.cpython-310.pyc +0 -0
  147. clarifai/workflows/__pycache__/validate.cpython-310.pyc +0 -0
  148. {clarifai-11.0.4.dist-info → clarifai-11.0.6rc1.dist-info}/METADATA +16 -27
  149. clarifai-11.0.6rc1.dist-info/RECORD +242 -0
  150. {clarifai-11.0.4.dist-info → clarifai-11.0.6rc1.dist-info}/WHEEL +1 -1
  151. clarifai-11.0.4.dist-info/RECORD +0 -100
  152. {clarifai-11.0.4.dist-info → clarifai-11.0.6rc1.dist-info}/LICENSE +0 -0
  153. {clarifai-11.0.4.dist-info → clarifai-11.0.6rc1.dist-info}/entry_points.txt +0 -0
  154. {clarifai-11.0.4.dist-info → clarifai-11.0.6rc1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,31 @@
1
+ ARG PYTHON_VERSION=${PYTHON_VERSION}
2
+ FROM public.ecr.aws/docker/library/python:${PYTHON_VERSION}-slim-bookworm as build
3
+
4
+ # Set the working directory to /app
5
+ WORKDIR /app
6
+
7
+ COPY requirements.txt .
8
+ # Install requirements and cleanup before leaving this line.
9
+ # Note(zeiler): this could be in a future template as {{model_python_deps}}
10
+ RUN python -m pip install -r requirements.txt && rm -rf /root/.cache
11
+
12
+ # Install Clarifai SDK
13
+ RUN python -m pip install clarifai
14
+
15
+ # These will be set by the templating system.
16
+ ENV CLARIFAI_PAT=${CLARIFAI_PAT}
17
+ ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
18
+ ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
19
+ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
20
+ ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
21
+ ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
22
+
23
+ # Copy the current folder into /app/model_dir that the SDK will expect.
24
+ COPY . /app/model_dir/${name}
25
+
26
+ # Add the model directory to the python path.
27
+ ENV PYTHONPATH "${PYTHONPATH}:/app/model_dir/${name}"
28
+
29
+ # Finally run the clarifai entrypoint to start the runner loop and local dev server.
30
+ # Note(zeiler): we may want to make this a clarifai CLI call.
31
+ CMD ["python", "-m", "clarifai.runners.server", "--model_path", "/app/model_dir/${name}"]
@@ -0,0 +1,79 @@
1
+ # Build a virtualenv containing necessary system libraries and Python packages
2
+ # for users to install their own packages while also being distroless.
3
+ # * Install python3-venv
4
+ # * Install gcc libpython3-dev to compile C Python modules
5
+ # * In the virtualenv: Update pip, setuptools and wheel to support building new packages
6
+ # * Export environment variables to use the virtualenv by default
7
+ # * Create a non-root user with minimal privileges and use it
8
+ ARG TARGET_PLATFORM=linux/amd64
9
+ FROM --platform=$TARGET_PLATFORM public.ecr.aws/docker/library/python:${PYTHON_VERSION}-slim-bookworm as build
10
+
11
+ ENV DEBIAN_FRONTEND=noninteractive
12
+ RUN apt-get update && \
13
+ apt-get install --no-install-suggests --no-install-recommends --yes \
14
+ software-properties-common \
15
+ gcc \
16
+ libpython3-dev && \
17
+ python${PYTHON_VERSION} -m venv /venv && \
18
+ /venv/bin/pip install --disable-pip-version-check --upgrade pip setuptools wheel && \
19
+ apt-get clean && rm -rf /var/lib/apt/lists/*
20
+
21
+ # Set environment variables to use virtualenv by default
22
+ ENV VIRTUAL_ENV=/venv
23
+ ENV PATH="$VIRTUAL_ENV/bin:$PATH"
24
+
25
+ #############################
26
+ # User specific requirements
27
+ #############################
28
+ COPY requirements.txt .
29
+
30
+ # Install requirements and cleanup before leaving this line.
31
+ # Note(zeiler): this could be in a future template as {{model_python_deps}}
32
+ RUN python -m pip install -r requirements.txt && rm -rf /root/.cache
33
+
34
+ # Install Clarifai SDK
35
+ RUN python -m pip install clarifai
36
+
37
+ #############################
38
+ # Finally copy everything we built into a distroless image for runtime.
39
+ #############################
40
+ ARG TARGET_PLATFORM=linux/amd64
41
+ FROM --platform=$TARGET_PLATFORM gcr.io/distroless/python3-debian12:latest
42
+ # FROM --platform=$TARGET_PLATFORM gcr.io/distroless/python3-debian12:debug
43
+ ARG PYTHON_VERSION=${PYTHON_VERSION}
44
+ # needed to call pip directly
45
+ COPY --from=build /bin/sh /bin/sh
46
+
47
+ # virtual env
48
+ COPY --from=build /venv /venv
49
+
50
+ # We have to overwrite the python3 binary that the distroless image uses
51
+ COPY --from=build /usr/local/bin/python${PYTHON_VERSION} /usr/bin/python3
52
+ # And also copy in all the lib files for it.
53
+ COPY --from=build /usr/local/lib/ /usr/lib/
54
+
55
+ # Set environment variables to use virtualenv by default
56
+ ENV VIRTUAL_ENV=/venv
57
+ ENV PYTHONPATH=${PYTHONPATH}:${VIRTUAL_ENV}/lib/python${PYTHON_VERSION}/site-packages
58
+
59
+ # These will be set by the templating system.
60
+ ENV CLARIFAI_PAT=${CLARIFAI_PAT}
61
+ ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
62
+ ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
63
+ ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
64
+ ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
65
+ ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
66
+
67
+ # Set the working directory to /app
68
+ WORKDIR /app
69
+
70
+ # Copy the current folder into /app/model_dir that the SDK will expect.
71
+ # Note(zeiler): would be nice to exclude checkpoints in case they were pre-downloaded.
72
+ COPY . /app/model_dir/${name}
73
+
74
+ # Add the model directory to the python path.
75
+ ENV PYTHONPATH=${PYTHONPATH}:/app/model_dir/${name}
76
+
77
+ # Finally run the clarifai entrypoint to start the runner loop and local dev server.
78
+ # Note(zeiler): we may want to make this a clarifai CLI call.
79
+ CMD ["-m", "clarifai.runners.server", "--model_path", "/app/model_dir/${name}"]
@@ -1,43 +1,68 @@
1
- FROM --platform=$TARGETPLATFORM ${BASE_IMAGE} as build
2
-
3
- ENV DEBIAN_FRONTEND=noninteractive
4
-
1
+ # syntax=docker/dockerfile:1
5
2
  #############################
6
- # User specific requirements
3
+ # User specific requirements installed in the pip_packages
7
4
  #############################
8
- COPY requirements.txt .
5
+ FROM --platform=$TARGETPLATFORM ${DOWNLOAD_IMAGE} as pip_packages
6
+
7
+ COPY --link requirements.txt /home/nonroot/requirements.txt
9
8
 
10
- # Install requirements and clarifai package and cleanup before leaving this line.
11
- # Note(zeiler): this could be in a future template as {{model_python_deps}}
12
- RUN pip install --no-cache-dir -r requirements.txt && \
13
- pip install --no-cache-dir clarifai
9
+ # Update clarifai package so we always have latest protocol to the API. Everything should land in /venv
10
+ RUN pip install --no-cache-dir -r /home/nonroot/requirements.txt && \
11
+ (pip install --upgrade --upgrade-strategy only-if-needed --no-deps --no-cache-dir clarifai clarifai-grpc clarifai-protocol || true)
12
+ #############################
14
13
 
15
- # These will be set by the templaing system.
16
- ENV CLARIFAI_PAT=${CLARIFAI_PAT}
17
- ENV CLARIFAI_USER_ID=${CLARIFAI_USER_ID}
18
- ENV CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID}
19
- ENV CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID}
20
- ENV CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID}
21
- ENV CLARIFAI_API_BASE=${CLARIFAI_API_BASE}
14
+ #############################
15
+ # Final image
16
+ #############################
17
+ FROM --platform=$TARGETPLATFORM ${BASE_IMAGE} as final
22
18
 
23
19
  # Set the NUMBA cache dir to /tmp
24
- ENV NUMBA_CACHE_DIR=/tmp/numba_cache
25
20
  # Set the TORCHINDUCTOR cache dir to /tmp
26
- ENV TORCHINDUCTOR_CACHE_DIR=/tmp/torchinductor_cache
27
- ENV HOME=/tmp
21
+ # The CLARIFAI* vars will be set by the templating system.
22
+ ENV CLARIFAI_PAT=${CLARIFAI_PAT} \
23
+ CLARIFAI_USER_ID=${CLARIFAI_USER_ID} \
24
+ CLARIFAI_RUNNER_ID=${CLARIFAI_RUNNER_ID} \
25
+ CLARIFAI_NODEPOOL_ID=${CLARIFAI_NODEPOOL_ID} \
26
+ CLARIFAI_COMPUTE_CLUSTER_ID=${CLARIFAI_COMPUTE_CLUSTER_ID} \
27
+ CLARIFAI_API_BASE=${CLARIFAI_API_BASE} \
28
+ NUMBA_CACHE_DIR=/tmp/numba_cache \
29
+ TORCHINDUCTOR_CACHE_DIR=/tmp/torchinductor_cache \
30
+ HOME=/tmp \
31
+ DEBIAN_FRONTEND=noninteractive
28
32
 
29
- # Set the working directory to /app
30
- WORKDIR /app
33
+ # make sure we have the latest clarifai package.
34
+ RUN (pip install --upgrade --upgrade-strategy only-if-needed --no-cache-dir clarifai clarifai-grpc clarifai-protocol || true)
31
35
 
32
- # Copy the current folder into /app/model_dir that the SDK will expect.
33
- # Note(zeiler): would be nice to exclude checkpoints in case they were pre-downloaded.
34
- COPY . /app/model_dir/${name}
36
+ #####
37
+ # Download any checkpoints now into the final image so we don't copy large checkpoints across stages.
38
+ #####
39
+ # ENV HF_HUB_ENABLE_HF_TRANSFER=1
40
+ # This creates the directory that the HF downloader will populate, with nonroot:nonroot permissions set up.
41
+ COPY --chown=nonroot:nonroot downloader/requirements.txt /home/nonroot/model_dir/main/1/checkpoints/.cache/not_used.yaml
35
42
 
36
- # Add the model directory to the python path.
37
- ENV PYTHONPATH=${PYTHONPATH}:/app/model_dir/${name}
43
+ # Mount the folder with the download_checkpoints.py, config.yaml that's been parsed then download
44
+ # the checkpoints. This invalidates the cache if we have a different repo_id to download, but the
45
+ # HF_TOKEN changing does not invalidate. It's validated before running the dockerfile
46
+ RUN --mount=type=secret,id=HF_TOKEN,env=HF_TOKEN --mount=type=bind,rw,source=downloader,target=/home/nonroot/model_dir/main [ -s /home/nonroot/model_dir/main/config.yaml ] && HF_HUB_ENABLE_HF_TRANSFER=1 python /home/nonroot/model_dir/main/download_checkpoints.py --model_path /home/nonroot/model_dir/main
47
+ #####
38
48
 
39
- ENTRYPOINT ["python", "-m", "clarifai.runners.server"]
49
+ #####
50
+ # Copy the python packages from the previous stage.
51
+ #####
52
+ COPY --link=true --from=pip_packages /venv /venv
53
+ #####
54
+ # Copy in the actual files like config.yaml, requirements.txt, and most importantly 1/model.py
55
+ # for the actual model.
56
+ # If checkpoints aren't downloaded since a checkpoints: block is not provided, then they will
57
+ # be in the build context and copied here as well.
58
+ COPY --link=true 1/model.py /home/nonroot/main/1/model.py
59
+ # At this point we only need these for validation in the SDK.
60
+ COPY --link=true requirements.txt config.yaml /home/nonroot/main/
40
61
 
62
+ # Add the model directory to the python path.
63
+ ENV PYTHONPATH=${PYTHONPATH}:/home/nonroot/main
41
64
  # Finally run the clarifai entrypoint to start the runner loop and local dev server.
42
65
  # Note(zeiler): we may want to make this a clarifai CLI call.
43
- CMD ["--model_path", "/app/model_dir/main"]
66
+ ENTRYPOINT ["python", "-m", "clarifai.runners.server"]
67
+ CMD ["--model_path", "/home/nonroot/main"]
68
+ #############################
@@ -0,0 +1,72 @@
1
+ from clarifai.client.runner import Runner
2
+ from clarifai_grpc.grpc.api import resources_pb2
3
+
4
+ # This example requires to run the following before running this example:
5
+ # pip install transformers
6
+
7
+ # https://huggingface.co/TheBloke/Llama-2-70B-chat-GPTQ
8
+ model_name_or_path = "TheBloke/Llama-2-7B-chat-GPTQ"
9
+ model_basename = "model"
10
+
11
+ use_triton = False
12
+
13
+
14
+ class Llama2Runner(Runner):
15
+ """A custom runner that runs the LLama2 LLM.
16
+ """
17
+
18
+ def __init__(self, *args, **kwargs):
19
+ print("Starting to load the model...")
20
+ st = time.time()
21
+ self.tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
22
+ self.model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map='auto')
23
+
24
+ self.logger.info("Loading model complete in (%f seconds), ready to loop for requests." %
25
+ (time.time() - st))
26
+ super(MyRunner, self).__init__(*args, **kwargs)
27
+
28
+ def run_input(self, input: resources_pb2.Input,
29
+ output_info: resources_pb2.OutputInfo) -> resources_pb2.Output:
30
+ """This is the method that will be called when the runner is run. It takes in an input and
31
+ returns an output.
32
+ """
33
+
34
+ output = resources_pb2.Output()
35
+ data = input.data
36
+ if data.text.raw != "":
37
+ input_text = data.text.raw
38
+ elif data.text.url != "":
39
+ input_text = str(requests.get(data.text.url).text)
40
+ else:
41
+ raise Exception("Need to include data.text.raw or data.text.url in your inputs.")
42
+
43
+ st = time.time()
44
+ max_tokens = 4096
45
+ # # Method 1
46
+ # input_ids = self.tokenizer(input_text, return_tensors='pt').input_ids.cuda()
47
+ # out = self.model.generate(inputs=input_ids, temperature=0.7, max_new_tokens=max_tokens)
48
+ # out_text = self.tokenizer.decode(out[0], skip_special_tokens=True)
49
+ # output.data.text.raw = out_text.replace(input_text, '')
50
+
51
+ # # Method 2
52
+ pipe = pipeline(
53
+ "text-generation",
54
+ model=self.model,
55
+ tokenizer=self.tokenizer,
56
+ max_new_tokens=max_tokens,
57
+ temperature=0.7,
58
+ top_p=0.95,
59
+ repetition_penalty=1.15,
60
+ return_full_text=False)
61
+ a = pipe(input_text)
62
+ output.data.text.raw = a[0]['generated_text']
63
+ return output
64
+
65
+
66
+ if __name__ == '__main__':
67
+ # Make sure you set these env vars before running the example.
68
+ # CLARIFAI_PAT
69
+ # CLARIFAI_USER_ID
70
+
71
+ # You need to first create a runner in the Clarifai API and then use the ID here.
72
+ Llama2Runner(runner_id="sdk-llama2-runner").start()
@@ -0,0 +1,89 @@
1
+ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
2
+ from collections.abc import Iterator
3
+ from google.protobuf import json_format
4
+
5
+ from clarifai.client.runner import Runner
6
+
7
+
8
+ class MyRunner(Runner):
9
+ """A custom runner that adds "Hello World" to the end of the text and replaces the domain of the
10
+ image URL as an example.
11
+ """
12
+
13
+ def run_input(self, input: resources_pb2.Input, output_info: resources_pb2.OutputInfo,
14
+ **kwargs) -> resources_pb2.Output:
15
+ """This is the method that will be called when the runner is run. It takes in an input and
16
+ returns an output.
17
+ """
18
+
19
+ output = resources_pb2.Output()
20
+
21
+ data = input.data
22
+
23
+ # Optional use of output_info
24
+ params_dict = {}
25
+ if "params" in output_info:
26
+ params_dict = output_info["params"]
27
+
28
+ if data.text.raw != "":
29
+ output.data.text.raw = data.text.raw + "Hello World" + params_dict.get(
30
+ "hello", "") + kwargs.get("extra", "")
31
+ if data.image.url != "":
32
+ output.data.text.raw = data.image.url.replace("samples.clarifai.com",
33
+ "newdomain.com" + params_dict.get("domain",))
34
+ return output
35
+
36
+ def generate(self, request: service_pb2.PostModelOutputsRequest
37
+ ) -> Iterator[service_pb2.MultiOutputResponse]:
38
+ """Example yielding a whole batch of streamed stuff back.
39
+ """
40
+
41
+ model = request.model
42
+ output_info = None
43
+ if request.model.model_version.id != "":
44
+ output_info = json_format.MessageToDict(
45
+ model.model_version.output_info, preserving_proto_field_name=True)
46
+
47
+ for i in range(10): # fake something iterating generating 10 times.
48
+
49
+ outputs = []
50
+ for input in request.inputs:
51
+ # output = self.run_input(input, output_info, extra=f" {i}")
52
+ output = resources_pb2.Output()
53
+ output.data.text.raw = f"Generate Hello World {i}"
54
+ outputs.append(output)
55
+ resp = service_pb2.MultiOutputResponse(outputs=outputs,)
56
+ yield resp
57
+
58
+ def stream(self, request: service_pb2.PostModelOutputsRequest
59
+ ) -> Iterator[service_pb2.MultiOutputResponse]:
60
+ """Example yielding a whole batch of streamed stuff back.
61
+ """
62
+
63
+ model = request.model
64
+ output_info = None
65
+ if request.model.model_version.id != "":
66
+ output_info = json_format.MessageToDict(
67
+ model.model_version.output_info, preserving_proto_field_name=True)
68
+
69
+ for i in range(10): # fake something iterating generating 10 times.
70
+
71
+ outputs = []
72
+ for input in request.inputs:
73
+ # output = self.run_input(input, output_info, extra=f" {i}")
74
+ output = resources_pb2.Output()
75
+ out_text = input.data.text.raw + f"Stream Hello World {i}"
76
+ print(out_text)
77
+ output.data.text.raw = out_text
78
+ outputs.append(output)
79
+ resp = service_pb2.MultiOutputResponse(outputs=outputs,)
80
+ yield resp
81
+
82
+
83
+ if __name__ == '__main__':
84
+ # Make sure you set these env vars before running the example.
85
+ # CLARIFAI_PAT
86
+ # CLARIFAI_USER_ID
87
+
88
+ # You need to first create a runner in the Clarifai API and then use the ID here.
89
+ MyRunner(runner_id="matt-test-runner", base_url="http://q6:32013", num_parallel_polls=1).start()
@@ -0,0 +1,87 @@
1
+ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
2
+ from collections.abc import Iterator
3
+ from google.protobuf import json_format
4
+
5
+ from clarifai.client.runner import Runner
6
+
7
+
8
+ class MyRunner(Runner):
9
+ """A custom runner that adds "Hello World" to the end of the text and replaces the domain of the
10
+ image URL as an example.
11
+ """
12
+
13
+ def run_input(self, input: resources_pb2.Input, output_info: resources_pb2.OutputInfo,
14
+ **kwargs) -> resources_pb2.Output:
15
+ """This is the method that will be called when the runner is run. It takes in an input and
16
+ returns an output.
17
+ """
18
+
19
+ output = resources_pb2.Output()
20
+
21
+ data = input.data
22
+
23
+ # Optional use of output_info
24
+ params_dict = {}
25
+ if "params" in output_info:
26
+ params_dict = output_info["params"]
27
+
28
+ if data.text.raw != "":
29
+ output.data.text.raw = data.text.raw + "Hello World" + params_dict.get(
30
+ "hello", "") + kwargs.get("extra", "")
31
+ if data.image.url != "":
32
+ output.data.text.raw = data.image.url.replace("samples.clarifai.com",
33
+ "newdomain.com" + params_dict.get("domain",))
34
+ return output
35
+
36
+ def generate(self, request: service_pb2.PostModelOutputsRequest
37
+ ) -> Iterator[service_pb2.MultiOutputResponse]:
38
+ """Example yielding a whole batch of streamed stuff back.
39
+ """
40
+
41
+ model = request.model
42
+ output_info = None
43
+ if request.model.model_version.id != "":
44
+ output_info = json_format.MessageToDict(
45
+ model.model_version.output_info, preserving_proto_field_name=True)
46
+
47
+ for i in range(10): # fake something iterating generating 10 times.
48
+
49
+ outputs = []
50
+ for input in request.inputs:
51
+ # output = self.run_input(input, output_info, extra=f" {i}")
52
+ output = resources_pb2.Output()
53
+ output.data.text.raw = f"Generate Hello World {i}"
54
+ outputs.append(output)
55
+ resp = service_pb2.MultiOutputResponse(outputs=outputs,)
56
+ yield resp
57
+
58
+ def stream(self, request: service_pb2.PostModelOutputsRequest
59
+ ) -> Iterator[service_pb2.MultiOutputResponse]:
60
+ """Example yielding a whole batch of streamed stuff back.
61
+ """
62
+
63
+ model = request.model
64
+ output_info = None
65
+ if request.model.model_version.id != "":
66
+ output_info = json_format.MessageToDict(
67
+ model.model_version.output_info, preserving_proto_field_name=True)
68
+
69
+ for i in range(10): # fake something iterating generating 10 times.
70
+
71
+ outputs = []
72
+ for input in request.inputs:
73
+ # output = self.run_input(input, output_info, extra=f" {i}")
74
+ output = resources_pb2.Output()
75
+ output.data.text.raw = input.data.text.raw + f"Stream Hello World {i}"
76
+ outputs.append(output)
77
+ resp = service_pb2.MultiOutputResponse(outputs=outputs,)
78
+ yield resp
79
+
80
+
81
+ if __name__ == '__main__':
82
+ # Make sure you set these env vars before running the example.
83
+ # CLARIFAI_PAT
84
+ # CLARIFAI_USER_ID
85
+
86
+ # You need to first create a runner in the Clarifai API and then use the ID here.
87
+ MyRunner(runner_id="matt-test-runner", base_url="http://q6:32013", num_parallel_polls=1).start()
@@ -0,0 +1,129 @@
1
+ from clarifai_grpc.grpc.api import resources_pb2, service_pb2
2
+ from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
3
+ from collections.abc import Iterator
4
+ from google.protobuf import json_format
5
+
6
+ from clarifai.client.runner import Runner
7
+ import time
8
+ from threading import Thread
9
+
10
+ import grpc
11
+ import requests
12
+
13
+ from transformers import (AutoModelForCausalLM, AutoTokenizer, TextIteratorStreamer)
14
+
15
+ model_name_or_path = "TheBloke/Llama-2-7B-chat-GPTQ"
16
+ model_basename = "model"
17
+ use_triton = False
18
+ tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, use_fast=True)
19
+ model = AutoModelForCausalLM.from_pretrained(model_name_or_path, device_map='auto')
20
+ streamer = TextIteratorStreamer(tokenizer)
21
+ print("Model loaded")
22
+
23
+
24
+ class MyRunner(Runner):
25
+ """A custom runner that adds "Hello World" to the end of the text and replaces the domain of the
26
+ image URL as an example.
27
+ """
28
+
29
+ def run_input(self, input: resources_pb2.Input, output_info: resources_pb2.OutputInfo,
30
+ **kwargs) -> resources_pb2.Output:
31
+ """This is the method that will be called when the runner is run. It takes in an input and
32
+ returns an output.
33
+ """
34
+
35
+ output = resources_pb2.Output()
36
+
37
+ data = input.data
38
+
39
+ # Optional use of output_info
40
+ params_dict = {}
41
+ if "params" in output_info:
42
+ params_dict = output_info["params"]
43
+
44
+ if data.text.raw != "":
45
+ output.data.text.raw = data.text.raw + "Hello World" + params_dict.get(
46
+ "hello", "") + kwargs.get("extra", "")
47
+ if data.image.url != "":
48
+ output.data.text.raw = data.image.url.replace("samples.clarifai.com",
49
+ "newdomain.com" + params_dict.get("domain",))
50
+ return output
51
+
52
+ def generate(self, request: service_pb2.PostModelOutputsRequest
53
+ ) -> Iterator[service_pb2.MultiOutputResponse]:
54
+ """Example yielding a whole batch of streamed stuff back.
55
+ """
56
+
57
+ output_info = None
58
+ if request.model.model_version.id != "":
59
+ output_info = json_format.MessageToDict(
60
+ request.model.model_version.output_info, preserving_proto_field_name=True)
61
+
62
+ for inp in request.inputs:
63
+ data = inp.data
64
+ print('start')
65
+ if data.text.raw != "":
66
+ input_text = data.text.raw
67
+ elif data.text.url != "":
68
+ input_text = str(requests.get(data.text.url).text)
69
+ else:
70
+ raise Exception("Need to include data.text.raw or data.text.url in your inputs.")
71
+
72
+ st = time.time()
73
+ max_tokens = 1024
74
+ # # Method 1
75
+ inputs = tokenizer(input_text, return_tensors='pt') #.input_ids.cuda()
76
+ generation_kwargs = dict(inputs, streamer=streamer, max_new_tokens=max_tokens)
77
+ thread = Thread(target=model.generate, kwargs=generation_kwargs)
78
+ thread.start()
79
+ times = []
80
+ st = time.time()
81
+ total_start = st
82
+ for new_text in streamer:
83
+ duration = time.time() - st
84
+ st = time.time()
85
+ print(f"Duration: {duration}")
86
+ times.append(duration)
87
+ # for new_text in ["hello", "world", "i'm", "streaming"]:
88
+
89
+ # out = model.generate(inputs=input_ids, temperature=0.7, max_new_tokens=max_tokens)
90
+ # out_text = tokenizer.decode(out[0], skip_special_tokens=True)
91
+ # output.data.text.raw = out_text.replace(input_text, '')
92
+
93
+ # # # Method 2
94
+ # print('before')
95
+ # pipe = pipeline(
96
+ # "text-generation",
97
+ # model=model,
98
+ # tokenizer=tokenizer,
99
+ # streamer=streamer,
100
+ # max_new_tokens=max_tokens,
101
+ # temperature=0.7,
102
+ # top_p=0.95,
103
+ # repetition_penalty=1.15,
104
+ # return_full_text=False)
105
+ # print('pipe')
106
+ # a = pipe(input_text)
107
+ # print(a)
108
+ print("Posting: ", new_text)
109
+ output = resources_pb2.Output()
110
+ output.data.text.raw = new_text
111
+ result = service_pb2.MultiOutputResponse(
112
+ status=status_pb2.Status(
113
+ code=status_code_pb2.SUCCESS,
114
+ description="Success",
115
+ ),
116
+ outputs=[output],
117
+ )
118
+ yield result
119
+ print(f"Total time: {time.time() - total_start}")
120
+ print(f"Average time: {sum(times) / len(times)}")
121
+
122
+
123
+ if __name__ == '__main__':
124
+ # Make sure you set these env vars before running the example.
125
+ # CLARIFAI_PAT
126
+ # CLARIFAI_USER_ID
127
+
128
+ # You need to first create a runner in the Clarifai API and then use the ID here.
129
+ MyRunner(runner_id="matt-test-runner", base_url="http://q6:32013", num_parallel_polls=1).start()