clarifai 11.7.2.tar.gz → 11.7.3.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {clarifai-11.7.2/clarifai.egg-info → clarifai-11.7.3}/PKG-INFO +1 -1
- clarifai-11.7.3/clarifai/__init__.py +1 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_builder.py +1 -1
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_runner.py +13 -5
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_servicer.py +7 -0
- clarifai-11.7.3/clarifai/runners/server.py +311 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/loader.py +2 -0
- clarifai-11.7.3/clarifai/utils/secrets.py +214 -0
- {clarifai-11.7.2 → clarifai-11.7.3/clarifai.egg-info}/PKG-INFO +1 -1
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai.egg-info/SOURCES.txt +2 -0
- clarifai-11.7.3/tests/test_secrets.py +451 -0
- clarifai-11.7.2/clarifai/__init__.py +0 -1
- clarifai-11.7.2/clarifai/runners/server.py +0 -169
- {clarifai-11.7.2 → clarifai-11.7.3}/LICENSE +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/MANIFEST.in +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/README.md +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/README.md +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/__main__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/base.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/compute_cluster.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/deployment.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/model.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/nodepool.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/pipeline.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/pipeline_step.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/templates/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/templates/model_templates.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/templates/pipeline_step_templates.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli/templates/pipeline_templates.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/cli.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/app.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/auth/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/auth/helper.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/auth/register.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/auth/stub.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/base.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/compute_cluster.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/dataset.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/deployment.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/input.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/lister.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/model.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/model_client.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/module.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/nodepool.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/pipeline.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/pipeline_step.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/runner.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/search.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/user.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/client/workflow.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/base.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/dataset.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/input.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/model.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/rag.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/search.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/constants/workflow.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/export/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/export/inputs_annotations.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/base.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/features.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/image.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/loaders/README.md +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/loaders/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/loaders/coco_captions.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/loaders/coco_detection.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/loaders/imagenet_classification.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/loaders/xview_detection.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/multimodal.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/text.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/datasets/upload/utils.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/errors.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/models/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/models/api.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/modules/README.md +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/modules/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/modules/css.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/modules/pages.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/modules/style.css +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/rag/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/rag/rag.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/rag/utils.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/dockerfile_template/Dockerfile.template +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/dummy_openai_model.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/mcp_class.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_class.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_run_locally.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/openai_class.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/visual_classifier_class.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/visual_detector_class.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/pipeline_steps/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/pipeline_steps/pipeline_step_builder.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/pipelines/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/pipelines/pipeline_builder.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/code_script.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/const.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/data_types/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/data_types/data_types.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/data_utils.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/method_signatures.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/model_utils.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/openai_convertor.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/pipeline_validation.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/serializers.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/url_fetcher.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/schema/search.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/urls/helper.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/cli.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/config.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/constants.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/evaluation/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/evaluation/helpers.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/evaluation/main.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/evaluation/testset_annotation_parser.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/logging.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/misc.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/model_train.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/utils/protobuf.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/versions.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/workflows/__init__.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/workflows/export.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/workflows/utils.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai/workflows/validate.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai.egg-info/dependency_links.txt +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai.egg-info/entry_points.txt +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai.egg-info/requires.txt +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/clarifai.egg-info/top_level.txt +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/pyproject.toml +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/requirements.txt +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/setup.cfg +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/setup.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_app.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_async_stub.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_auth.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_data_upload.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_eval.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_list_models.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_misc.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_model_predict.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_model_train.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_modules.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_pipeline_client.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_rag.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_search.py +0 -0
- {clarifai-11.7.2 → clarifai-11.7.3}/tests/test_stub.py +0 -0
clarifai-11.7.3/clarifai/__init__.py
@@ -0,0 +1 @@
+__version__ = "11.7.3"
{clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_builder.py
@@ -109,7 +109,7 @@ class ModelBuilder:
         self.inference_compute_info = self._get_inference_compute_info()
         self.is_v3 = True  # Do model build for v3
 
-    def create_model_instance(self, load_model=True, mocking=False):
+    def create_model_instance(self, load_model=True, mocking=False) -> ModelClass:
         """
         Create an instance of the model class, as specified in the config file.
         """
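The only change here is the added return-type annotation on create_model_instance. As a rough sketch of how the method is used (the path is a placeholder; this mirrors what the new server.py further down does):

from clarifai.runners.models.model_builder import ModelBuilder

builder = ModelBuilder("/path/to/my_model", download_validation_only=True)  # placeholder model dir
model = builder.create_model_instance()  # now annotated as returning a ModelClass instance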
{clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_runner.py
@@ -1,5 +1,5 @@
 import time
-from typing import Iterator
+from typing import Iterator, Optional
 
 from clarifai_grpc.grpc.api import service_pb2
 from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
@@ -9,6 +9,7 @@ from clarifai_protocol.utils.health import HealthProbeRequestHandler
 from clarifai.client.auth.helper import ClarifaiAuthHelper
 from clarifai.utils.constants import STATUS_FAIL, STATUS_MIXED, STATUS_OK, STATUS_UNKNOWN
 from clarifai.utils.logging import get_req_id_from_context, logger
+from clarifai.utils.secrets import inject_secrets
 
 from ..utils.url_fetcher import ensure_urls_downloaded
 from .model_class import ModelClass
@@ -25,11 +26,11 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
         runner_id: str,
         nodepool_id: str,
         compute_cluster_id: str,
-        user_id: str = None,
+        user_id: Optional[str] = None,
         check_runner_exists: bool = True,
         base_url: str = "https://api.clarifai.com",
-        pat: str = None,
-        token: str = None,
+        pat: Optional[str] = None,
+        token: Optional[str] = None,
         num_parallel_polls: int = 4,
         **kwargs,
     ) -> None:
@@ -54,7 +55,7 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
         self._base_url = base_url
 
         # Create auth helper if we have sufficient authentication information
-        self._auth_helper = None
+        self._auth_helper: Optional[ClarifaiAuthHelper] = None
         if self._user_id and (self._pat or self._token):
             try:
                 self._auth_helper = ClarifaiAuthHelper(
@@ -109,6 +110,7 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
             raise Exception("Unexpected work item type: {}".format(runner_item))
         request = runner_item.post_model_outputs_request
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
+        inject_secrets(request)
         start_time = time.time()
         req_id = get_req_id_from_context()
         status_str = STATUS_UNKNOWN
@@ -175,6 +177,7 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
             raise Exception("Unexpected work item type: {}".format(runner_item))
         request = runner_item.post_model_outputs_request
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
+        inject_secrets(request)
 
         # --- Live logging additions ---
         start_time = time.time()
@@ -277,10 +280,15 @@ class ModelRunner(BaseRunner, HealthProbeRequestHandler):
         duration_ms = (time.time() - start_time) * 1000
         logger.info(f"{endpoint} | {status_str} | {duration_ms:.2f}ms | req_id={req_id}")
 
+    def set_model(self, model: ModelClass):
+        """Set the model for this runner."""
+        self.model = model
+
 
 def pmo_iterator(runner_item_iterator, auth_helper=None):
     for runner_item in runner_item_iterator:
         if not runner_item.HasField('post_model_outputs_request'):
             raise Exception("Unexpected work item type: {}".format(runner_item))
         ensure_urls_downloaded(runner_item.post_model_outputs_request, auth_helper=auth_helper)
+        inject_secrets(runner_item.post_model_outputs_request)
         yield runner_item.post_model_outputs_request
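These hunks tighten the Optional type hints, call the new inject_secrets helper on every PostModelOutputsRequest right after URL inputs are downloaded, and add a set_model hook so the running model can be swapped in place. A minimal sketch of what inject_secrets does to a request, assuming CLARIFAI_SECRETS_PATH points at a mounted secrets directory (layout shown with secrets.py below; the /secrets path is a placeholder):

import os
from clarifai_grpc.grpc.api import service_pb2
from clarifai.utils.secrets import inject_secrets

os.environ.setdefault("CLARIFAI_SECRETS_PATH", "/secrets")  # hypothetical mount point
request = service_pb2.PostModelOutputsRequest()
inject_secrets(request)  # same call the runner now makes for every work item
print(request.model.model_version.output_info.params)  # mounted .env secrets merged into the params Struct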
{clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/models/model_servicer.py
@@ -6,6 +6,7 @@ from clarifai_grpc.grpc.api import service_pb2, service_pb2_grpc
 from clarifai_grpc.grpc.api.status import status_code_pb2, status_pb2
 
 from clarifai.client.auth.helper import ClarifaiAuthHelper
+from clarifai.utils.secrets import inject_secrets
 
 from ..utils.url_fetcher import ensure_urls_downloaded
 
@@ -56,6 +57,7 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
 
         # Download any urls that are not already bytes.
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
+        inject_secrets(request)
 
         try:
             return self.model.predict_wrapper(request)
@@ -80,6 +82,7 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
         """
         # Download any urls that are not already bytes.
         ensure_urls_downloaded(request, auth_helper=self._auth_helper)
+        inject_secrets(request)
 
         try:
             yield from self.model.generate_wrapper(request)
@@ -108,6 +111,7 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
         # Download any urls that are not already bytes.
         for req in request:
             ensure_urls_downloaded(req, auth_helper=self._auth_helper)
+            inject_secrets(req)
 
         try:
             yield from self.model.stream_wrapper(request_copy)
@@ -122,3 +126,6 @@ class ModelServicer(service_pb2_grpc.V2Servicer):
                     internal_details=str(e),
                 )
             )
+
+    def set_model(self, model):
+        self.model = model
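The servicer gets the same inject_secrets call before predict/generate/stream, plus its own set_model hook. Together with ModelRunner.set_model above, this is what lets the new ModelServer (next diff) rebuild the model when mounted secrets change and swap it in without restarting the gRPC server or the runner loop. A condensed, illustrative version of that reload path (a sketch, not the actual implementation, which lives in ModelServer.reload_model_on_secrets_change below):

from clarifai.utils.secrets import load_secrets

def reload(builder, servicer=None, runner=None, secrets_path=None):
    if secrets_path is not None:
        load_secrets(secrets_path)               # re-export .env secrets into os.environ
    new_model = builder.create_model_instance()  # rebuild against the refreshed environment
    if servicer is not None:
        servicer.set_model(new_model)            # hook added in this release
    if runner is not None:
        runner.set_model(new_model)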
clarifai-11.7.3/clarifai/runners/server.py
@@ -0,0 +1,311 @@
+"""
+This is simply the main file for the server that imports ModelRunner implementation
+and starts the server.
+"""
+
+import argparse
+import os
+from concurrent import futures
+from typing import Optional
+
+from clarifai_grpc.grpc.api import service_pb2_grpc
+from clarifai_protocol.utils.grpc_server import GRPCServer
+
+from clarifai.runners.models.model_builder import ModelBuilder
+from clarifai.runners.models.model_runner import ModelRunner
+from clarifai.runners.models.model_servicer import ModelServicer
+from clarifai.utils.logging import logger
+from clarifai.utils.secrets import get_secrets_path, load_secrets, start_secrets_watcher
+
+
+def main():
+    parser = argparse.ArgumentParser()
+    parser.add_argument(
+        '--port',
+        type=int,
+        default=8000,
+        help="The port to host the gRPC server at.",
+        choices=range(1024, 65535),
+    )
+    parser.add_argument(
+        '--pool_size',
+        type=int,
+        default=os.environ.get('CLARIFAI_NUM_THREADS', 32),
+        help="The number of threads to use for the gRPC server. Runner (ie. grpc=False) threads are read from the config file and ModelBuilder defaults.",
+        choices=range(1, 129),
+    )  # pylint: disable=range-builtin-not-iterating
+    parser.add_argument(
+        '--max_queue_size',
+        type=int,
+        default=10,
+        help='Max queue size of requests before we begin to reject requests (default: 10).',
+        choices=range(1, 21),
+    )  # pylint: disable=range-builtin-not-iterating
+    parser.add_argument(
+        '--max_msg_length',
+        type=int,
+        default=1024 * 1024 * 1024,
+        help='Max message length of grpc requests (default: 1 GB).',
+    )
+    parser.add_argument(
+        '--enable_tls',
+        action='store_true',
+        default=False,
+        help='Set to true to enable TLS (default: False) since this server is meant for local development only.',
+    )
+    parser.add_argument(
+        '--grpc',
+        action='store_true',
+        default=False,
+        help='Set to true to start the gRPC server (default: False). If set to false, the server will not start and only the runner loop will start to fetch work from the API.',
+    )
+    parser.add_argument(
+        '--model_path',
+        type=str,
+        required=True,
+        help='The path to the model directory that contains implemention of the model.',
+    )
+
+    parsed_args = parser.parse_args()
+
+    server = ModelServer(parsed_args.model_path)
+    server.serve(
+        port=parsed_args.port,
+        pool_size=parsed_args.pool_size,
+        max_queue_size=parsed_args.max_queue_size,
+        max_msg_length=parsed_args.max_msg_length,
+        enable_tls=parsed_args.enable_tls,
+        grpc=parsed_args.grpc,
+    )
+
+
+class ModelServer:
+    def __init__(self, model_path):
+        self.model_path = model_path
+        self._servicer = None
+        self._runner = None
+        self._secrets_path = get_secrets_path()
+        self._watcher_thread = None
+
+        # Initialize secrets system with enhanced validation
+        self._initialize_secrets_system()
+
+        # Build model after secrets are loaded
+        self._builder = ModelBuilder(model_path, download_validation_only=True)
+        self._current_model = self._builder.create_model_instance()
+        logger.info("ModelServer initialized successfully")
+
+    def _initialize_secrets_system(self):
+        """Initialize the secrets management system with comprehensive validation."""
+        if not self._secrets_path:
+            logger.info("No secrets path configured, running without secrets")
+            return
+
+        logger.info(f"Initializing secrets system with path: {self._secrets_path}")
+
+        # Load existing secrets if directory exists
+        if self._secrets_path.exists():
+            try:
+                loaded_secrets = load_secrets(self._secrets_path)
+                if loaded_secrets:
+                    logger.info(f"Loaded {len(loaded_secrets)} initial secrets")
+                else:
+                    logger.info("Secrets directory exists but contains no valid secrets")
+            except Exception as e:
+                logger.error(f"Error loading initial secrets: {e}")
+        else:
+            logger.info(f"Secrets directory does not exist yet: {self._secrets_path}")
+
+        # Always start the watcher regardless of current directory state
+        # This handles the case where secrets are mounted after server startup
+        try:
+            self._watcher_thread = start_secrets_watcher(
+                self._secrets_path, self.reload_model_on_secrets_change, interval=10.0
+            )
+            logger.info("Secrets watcher started successfully")
+        except Exception as e:
+            logger.error(f"Failed to start secrets watcher: {e}")
+            # Don't fail server startup if watcher fails
+            self._watcher_thread = None
+
+    def reload_model_on_secrets_change(self) -> None:
+        """Reload model and environment secrets when the secrets directory changes.
+
+        This method implements a robust reload strategy with comprehensive error handling
+        and component state management.
+        """
+        logger.info("Detected secrets change, initiating model reload sequence...")
+
+        # Step 1: Reload secrets from filesystem
+        if self._secrets_path is not None:
+            try:
+                loaded_secrets = load_secrets(self._secrets_path)
+                if loaded_secrets:
+                    logger.info(f"Reloaded {len(loaded_secrets)} secrets")
+                else:
+                    logger.warning("No secrets loaded during reload")
+            except Exception as e:
+                logger.error(f"Failed to reload secrets: {e}")
+                return
+
+        # Step 2: Rebuild model instance
+        if self._builder is not None:
+            try:
+                logger.info("Rebuilding model instance...")
+                self._current_model = self._builder.create_model_instance()
+                logger.info("Model instance rebuilt successfully")
+            except Exception as e:
+                logger.error(f"Failed to rebuild model instance: {e}")
+                # Keep the previous model instance if rebuild fails
+                return
+
+        # Step 3: Update servicer with new model
+        if self._servicer and self._current_model:
+            try:
+                self._servicer.set_model(self._current_model)
+                logger.info("Updated servicer with new model instance")
+            except Exception as e:
+                logger.error(f"Failed to update servicer with new model: {e}")
+
+        # Step 4: Update runner with new model
+        if self._runner and self._current_model:
+            try:
+                self._runner.set_model(self._current_model)
+                logger.info("Updated runner with new model instance")
+            except Exception as e:
+                logger.error(f"Failed to update runner with new model: {e}")
+
+        logger.info("Model reload sequence completed successfully")
+
+    def shutdown(self):
+        """Gracefully shutdown the server and cleanup resources."""
+        logger.info("Shutting down ModelServer...")
+
+        # Stop the watcher thread
+        if self._watcher_thread and self._watcher_thread.is_alive():
+            logger.info("Stopping secrets watcher...")
+            # Note: Since it's a daemon thread, it will stop when main process exits
+        logger.info("ModelServer shutdown completed")
+
+    def serve(
+        self,
+        port=8000,
+        pool_size=32,
+        num_threads=0,
+        max_queue_size=10,
+        max_msg_length=1024 * 1024 * 1024,
+        enable_tls=False,
+        grpc=False,
+        user_id: Optional[str] = os.environ.get("CLARIFAI_USER_ID", None),
+        compute_cluster_id: Optional[str] = os.environ.get("CLARIFAI_COMPUTE_CLUSTER_ID", None),
+        nodepool_id: Optional[str] = os.environ.get("CLARIFAI_NODEPOOL_ID", None),
+        runner_id: Optional[str] = os.environ.get("CLARIFAI_RUNNER_ID", None),
+        base_url: Optional[str] = os.environ.get("CLARIFAI_API_BASE", "https://api.clarifai.com"),
+        pat: Optional[str] = os.environ.get("CLARIFAI_PAT", None),
+        context=None,  # This is the current context object that contains user_id, app_id, model_id, etc.
+    ):
+        # `num_threads` can be set in config.yaml or via the environment variable CLARIFAI_NUM_THREADS="<integer>".
+        # Note: The value in config.yaml takes precedence over the environment variable.
+        if num_threads == 0:
+            num_threads = self._builder.config.get("num_threads")
+        # Setup the grpc server for local development.
+        if grpc:
+            self.start_servicer(
+                port,
+                pool_size,
+                max_queue_size,
+                max_msg_length,
+                enable_tls,
+            )
+        else:
+            # start the runner with the proper env variables and as a runner protocol.
+            self.start_runner(
+                context,
+                compute_cluster_id,
+                user_id,
+                nodepool_id,
+                runner_id,
+                base_url,
+                pat,
+                num_threads,
+            )
+
+    def start_servicer(self, port, pool_size, max_queue_size, max_msg_length, enable_tls):
+        # initialize the servicer with the runner so that it gets the predict(), generate(), stream() classes.
+        self._servicer = ModelServicer(self._current_model)
+
+        server = GRPCServer(
+            futures.ThreadPoolExecutor(
+                max_workers=pool_size,
+                thread_name_prefix="ServeCalls",
+            ),
+            max_msg_length,
+            max_queue_size,
+        )
+        server.add_port_to_server('[::]:%s' % port, enable_tls)
+
+        service_pb2_grpc.add_V2Servicer_to_server(self._servicer, server)
+        server.start()
+        logger.info("Started server on port %s", port)
+        logger.info(f"Access the model at http://localhost:{port}")
+        server.wait_for_termination()
+
+    def start_runner(
+        self,
+        context,
+        compute_cluster_id,
+        user_id,
+        nodepool_id,
+        runner_id,
+        base_url,
+        pat,
+        num_threads,
+    ):
+        # initialize the Runner class. This is what the user implements.
+        assert compute_cluster_id is not None, "compute_cluster_id must be set for the runner."
+        assert nodepool_id is not None, "nodepool_id must be set for the runner"
+        assert runner_id is not None, "runner_id must be set for the runner."
+        assert base_url is not None, "base_url must be set for the runner."
+        self._runner = ModelRunner(
+            model=self._current_model,
+            user_id=user_id,
+            compute_cluster_id=compute_cluster_id,
+            nodepool_id=nodepool_id,
+            runner_id=runner_id,
+            base_url=base_url,
+            pat=pat,
+            num_parallel_polls=num_threads,
+        )
+
+        if context is None:
+            logger.debug("Context is None. Skipping code snippet generation.")
+        else:
+            method_signatures = self._builder.get_method_signatures(mocking=False)
+            from clarifai.runners.utils import code_script
+
+            snippet = code_script.generate_client_script(
+                method_signatures,
+                user_id=context.user_id,
+                app_id=context.app_id,
+                model_id=context.model_id,
+                deployment_id=context.deployment_id,
+                base_url=context.api_base,
+            )
+            logger.info(
+                "✅ Your model is running locally and is ready for requests from the API...\n"
+            )
+            logger.info(
+                f"> Code Snippet: To call your model via the API, use this code snippet:\n{snippet}"
+            )
+            logger.info(
+                f"> Playground: To chat with your model, visit: {context.ui}/playground?model={context.model_id}__{context.model_version_id}&user_id={context.user_id}&app_id={context.app_id}\n"
+            )
+            logger.info(
+                f"> API URL: To call your model via the API, use this model URL: {context.ui}/users/{context.user_id}/apps/{context.app_id}/models/{context.model_id}\n"
+            )
+            logger.info("Press CTRL+C to stop the runner.\n")
+        self._runner.start()  # start the runner to fetch work from the API.
+
+
+if __name__ == '__main__':
+    main()
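server.py is rewritten around a ModelServer class that loads secrets, builds the model, starts a background secrets watcher, and only then serves either the local gRPC servicer or the runner loop. The entry point is unchanged in spirit: the module can still be run as a script (python -m clarifai.runners.server --model_path ... [--grpc]), or used programmatically, roughly like this (the path is a placeholder; in runner mode, user_id, compute_cluster_id, nodepool_id, runner_id, and pat default to the CLARIFAI_* environment variables listed in serve()):

from clarifai.runners.server import ModelServer

server = ModelServer("/path/to/my_model")  # loads secrets, builds the model, starts the watcher
server.serve(port=8000, grpc=True)         # local gRPC mode; grpc=False polls the Clarifai API for work instead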
{clarifai-11.7.2 → clarifai-11.7.3}/clarifai/runners/utils/loader.py
@@ -71,6 +71,7 @@ class HuggingFaceLoader:
             self.ignore_patterns.extend(ignore_file_patterns)
         else:
             self.ignore_patterns = ignore_file_patterns
+
         snapshot_download(
             repo_id=self.repo_id,
             local_dir=checkpoint_path,
@@ -196,6 +197,7 @@ class HuggingFaceLoader:
         if any(f.endswith(".safetensors") for f in repo_files):
             self.ignore_patterns = [
                 "**/original/*",
+                "original/*",
                 "**/*.pth",
                 "**/*.bin",
                 "*.pth",
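Two small loader tweaks: a blank line before snapshot_download, and an extra "original/*" ignore pattern for safetensors checkpoints, which under fnmatch-style globbing also skips an original/ directory at the repo root that "**/original/*" alone would miss. A hedged sketch of the effective call, assuming the patterns are forwarded to huggingface_hub as before (the repo id and path are placeholders, and the pattern list is abridged):

from huggingface_hub import snapshot_download

snapshot_download(
    repo_id="some-org/some-model",
    local_dir="/path/to/checkpoints",
    ignore_patterns=["**/original/*", "original/*", "**/*.pth", "**/*.bin", "*.pth"],  # abridged
)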
clarifai-11.7.3/clarifai/utils/secrets.py
@@ -0,0 +1,214 @@
+import os
+import time
+from pathlib import Path
+from threading import Thread
+from typing import Any, Callable, Optional
+
+from clarifai_grpc.grpc.api import resources_pb2, service_pb2
+from google.protobuf import struct_pb2
+from google.protobuf.json_format import MessageToDict
+
+from clarifai.utils.logging import logger
+
+
+def get_secrets_path() -> Optional[Path]:
+    path = os.environ.get("CLARIFAI_SECRETS_PATH", None)
+    return Path(path) if path else None
+
+
+def load_secrets(path: Path) -> Optional[dict[str, str]]:
+    """load_secrets reads .env style secret files, sets them as environment variables, and
+    returns the added variables.
+    Args:
+        path (Path): Path to the directory containing secrets files.
+    Returns:
+        dict[str, str] | None: Dict of loaded environment variables, or None if the file does not exist.
+    """
+    variables = get_env_variable(path)
+    if variables is not None:
+        set_env_variable(variables)
+        return variables
+    return None
+
+
+def set_env_variable(variables: dict[str, str]) -> None:
+    for key, value in variables.items():
+        os.environ[key] = value
+
+
+def get_env_variable(path: Path) -> Optional[dict[str, str]]:
+    """get_env_variable reads .env style secret files and returns variables to be added to the environment.
+    Args:
+        path (Path): Path to the secrets directory.
+    Returns:
+        dict[str, str] | None: Dictionary of environment variable keys and values, or None if the files do not exist.
+    """
+    if not path.exists() or not path.is_dir():
+        return None
+    loaded_keys = {}
+    for secret_dir in path.iterdir():
+        if not secret_dir.is_dir():
+            continue
+        secrets_file_path = secret_dir / secret_dir.name
+        if secrets_file_path.exists() and secrets_file_path.is_file():
+            secrets = read_secrets_file(secrets_file_path)
+            if secrets:
+                loaded_keys.update(secrets)
+    return loaded_keys
+
+
+def read_secrets_file(path: Path) -> Optional[dict[str, str]]:
+    """Read secrets from a single .env formatted file with robust error handling."""
+    if not path.exists() or not path.is_file():
+        logger.warning(f"Secret file does not exist or is not a file: {path}")
+        return None
+    loaded_keys = {}
+    try:
+        with open(path, 'r', encoding='utf-8') as f:
+            for line_num, line in enumerate(f, 1):
+                line = line.strip()
+                if not line or line.startswith('#'):
+                    continue
+                if '=' not in line:
+                    logger.warning(f"Invalid line format in {path}:{line_num}: {line}")
+                    continue
+                key, value = line.split('=', 1)
+                key = key.strip()
+                value = value.strip().strip('"').strip("'")
+                if key:  # Only add non-empty keys
+                    loaded_keys[key] = value
+                    logger.debug(f"Loaded secret key: {key}")
+    except (IOError, OSError, UnicodeDecodeError) as e:
+        logger.error(f"Error reading secrets file {path}: {e}")
+        return None
+    except Exception as e:
+        logger.error(f"Unexpected error reading secrets file {path}: {e}")
+        return None
+
+    return loaded_keys if loaded_keys else None
+
+
+def start_secrets_watcher(
+    secrets_path: Path, reload_callback: Callable, interval: float = 10
+) -> Thread:
+    """start_secrets_watcher starts a background thread that watches the secret file directory for changes
+    and calls the reload_callback when changes are detected.
+
+    Args:
+        secrets_path (Path): Path to the secrets file directory.
+        reload_callback (Callable): Callback function to call when the file changes.
+        interval (float, optional): Interval to wait before checking again. Defaults to 10.
+    """
+
+    def watch_loop():
+        previous_state = None
+
+        while True:
+            current_state = {}
+
+            # Build current state of all secret files
+            if secrets_path.exists():
+                for secret_dir in secrets_path.iterdir():
+                    if not secret_dir.is_dir():
+                        continue
+                    try:
+                        filepath = secret_dir / secret_dir.name
+                        if filepath.exists() and filepath.is_file():
+                            current_state[secret_dir.name] = filepath.stat().st_mtime
+                    except Exception as e:
+                        logger.error(f"Error checking secret file {secret_dir.name}: {e}")
+
+            # Trigger callback if state changed (but not on first run)
+            if previous_state is not None and current_state != previous_state:
+                try:
+                    logger.info("Secrets changed, calling reload callback...")
+                    reload_callback()
+                except Exception as e:
+                    logger.error(f"Error in reload callback: {e}")
+
+            previous_state = current_state
+            time.sleep(interval)
+
+    watcher_thread = Thread(target=watch_loop, daemon=True)
+    watcher_thread.start()
+    return watcher_thread
+
+
+def inject_secrets(request: Optional[service_pb2.PostModelOutputsRequest]) -> None:
+    """inject_secrets injects secrets into the request's model version output info params.
+    The request is modified in place.
+
+    Args:
+        request (service_pb2.PostModelOutputsRequest): The request to inject secrets into.
+    """
+    if request is None:
+        return
+
+    if secrets_path := get_secrets_path():
+        # Since only env type secrets are injected into the shared volume, we can read them directly.
+        variables = get_env_variable(secrets_path)
+    else:
+        # If no secrets path is set, assume no secrets and return the request as is.
+        return
+
+    if not request.HasField("model"):
+        request.model.CopyFrom(resources_pb2.Model())
+    if not request.model.HasField("model_version"):
+        request.model.model_version.CopyFrom(resources_pb2.ModelVersion())
+    if not request.model.model_version.HasField("output_info"):
+        request.model.model_version.output_info.CopyFrom(resources_pb2.OutputInfo())
+    if not request.model.model_version.output_info.HasField("params"):
+        request.model.model_version.output_info.params.CopyFrom(struct_pb2.Struct())
+
+    if variables:
+        request.model.model_version.output_info.params.update(variables)
+    return
+
+
+def get_secrets(
+    request: Optional[service_pb2.PostModelOutputsRequest],
+) -> dict[str, Any]:
+    """get_secrets extracts and returns the secrets from the request's model version output info params and environment.
+
+    Args:
+        request (Optional[service_pb2.PostModelOutputsRequest]): The request from which to extract secrets.
+    """
+    params = {}
+    env_params = {}
+    req_params = {}
+
+    if request is not None:
+        req_params = get_request_secrets(request)
+
+    if secrets_path := get_secrets_path():
+        # Since only env type secrets are injected into the shared volume, we can read them directly.
+        env_params = get_env_variable(secrets_path)
+    if env_params:
+        params.update(env_params)
+    if req_params:
+        params.update(req_params)
+    return params
+
+
+def get_request_secrets(request: service_pb2.PostModelOutputsRequest) -> Optional[dict[str, Any]]:
+    if (
+        request.HasField("model")
+        and request.model.HasField("model_version")
+        and request.model.model_version.HasField("output_info")
+        and request.model.model_version.output_info.HasField("params")
+    ):
+        return MessageToDict(request.model.model_version.output_info.params)
+    return None
+
+
+def get_secret(param_name: str) -> Optional[str]:
+    """get_secret retrieves a secret value from environment variables
+    Args:
+        param_name (str): Name of the secret to retrieve.
+    Returns:
+        Optional[str]: The value of the secret if found, otherwise None.
+    """
+    env_value = os.environ.get(param_name) or os.environ.get(param_name.upper())
+    if env_value:
+        return env_value
+    return None
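The new secrets module expects CLARIFAI_SECRETS_PATH to point at a directory where each secret is a subdirectory containing a .env-formatted file of the same name (the layout get_env_variable iterates above). A small, self-contained sketch of the read path using a throwaway directory:

import os
from pathlib import Path
from clarifai.utils.secrets import get_secret, load_secrets

root = Path("/tmp/clarifai-secrets/db-creds")        # <secrets dir>/<secret name>/
root.mkdir(parents=True, exist_ok=True)
(root / "db-creds").write_text("DB_USER=admin\nDB_PASSWORD='s3cr3t'\n")  # file named after its directory

os.environ["CLARIFAI_SECRETS_PATH"] = "/tmp/clarifai-secrets"
print(load_secrets(Path("/tmp/clarifai-secrets")))   # exports and returns {'DB_USER': 'admin', 'DB_PASSWORD': 's3cr3t'}
print(get_secret("db_password"))                     # falls back to the upper-cased env var -> 's3cr3t'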