mlrun 1.8.0rc9__py3-none-any.whl → 1.8.0rc12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mlrun/artifacts/__init__.py +1 -1
- mlrun/artifacts/document.py +53 -11
- mlrun/common/constants.py +1 -0
- mlrun/common/schemas/__init__.py +2 -0
- mlrun/common/schemas/model_monitoring/__init__.py +1 -0
- mlrun/common/schemas/model_monitoring/constants.py +7 -0
- mlrun/common/schemas/model_monitoring/model_endpoints.py +36 -0
- mlrun/config.py +1 -0
- mlrun/data_types/data_types.py +1 -0
- mlrun/data_types/spark.py +3 -2
- mlrun/data_types/to_pandas.py +11 -2
- mlrun/datastore/__init__.py +2 -0
- mlrun/datastore/targets.py +2 -1
- mlrun/datastore/vectorstore.py +21 -15
- mlrun/db/base.py +2 -0
- mlrun/db/httpdb.py +12 -0
- mlrun/db/nopdb.py +2 -0
- mlrun/feature_store/steps.py +1 -1
- mlrun/model_monitoring/api.py +30 -21
- mlrun/model_monitoring/applications/base.py +42 -4
- mlrun/projects/project.py +18 -16
- mlrun/runtimes/nuclio/serving.py +28 -5
- mlrun/serving/__init__.py +8 -0
- mlrun/serving/merger.py +1 -1
- mlrun/serving/remote.py +17 -5
- mlrun/serving/routers.py +27 -87
- mlrun/serving/server.py +6 -2
- mlrun/serving/states.py +154 -13
- mlrun/serving/v2_serving.py +38 -79
- mlrun/utils/helpers.py +6 -0
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/METADATA +10 -10
- {mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/RECORD +37 -37
- {mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/LICENSE +0 -0
- {mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/WHEEL +0 -0
- {mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/entry_points.txt +0 -0
- {mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/top_level.txt +0 -0
mlrun/serving/states.py
CHANGED
@@ -25,11 +25,12 @@ import pathlib
 import traceback
 from copy import copy, deepcopy
 from inspect import getfullargspec, signature
-from typing import Any, Optional, Union
+from typing import Any, Optional, Union, cast
 
 import storey.utils
 
 import mlrun
+import mlrun.common.schemas as schemas
 
 from ..config import config
 from ..datastore import get_stream_pusher
@@ -81,22 +82,28 @@ _task_step_fields = [
     "responder",
     "input_path",
     "result_path",
+    "model_endpoint_creation_strategy",
+    "endpoint_type",
 ]
 
 
 MAX_ALLOWED_STEPS = 4500
 
 
-def
-
-
-
-
-
-def new_remote_endpoint(url, **class_args):
+def new_remote_endpoint(
+    url: str,
+    creation_strategy: schemas.ModelEndpointCreationStrategy,
+    endpoint_type: schemas.EndpointType,
+    **class_args,
+):
     class_args = deepcopy(class_args)
     class_args["url"] = url
-    return TaskStep(
+    return TaskStep(
+        "$remote",
+        class_args=class_args,
+        model_endpoint_creation_strategy=creation_strategy,
+        endpoint_type=endpoint_type,
+    )
 
 
 class BaseStep(ModelObj):
@@ -419,6 +426,10 @@ class TaskStep(BaseStep):
         responder: Optional[bool] = None,
         input_path: Optional[str] = None,
         result_path: Optional[str] = None,
+        model_endpoint_creation_strategy: Optional[
+            schemas.ModelEndpointCreationStrategy
+        ] = schemas.ModelEndpointCreationStrategy.INPLACE,
+        endpoint_type: Optional[schemas.EndpointType] = schemas.EndpointType.NODE_EP,
     ):
         super().__init__(name, after)
         self.class_name = class_name
@@ -438,6 +449,8 @@ class TaskStep(BaseStep):
         self.on_error = None
         self._inject_context = False
         self._call_with_event = False
+        self.model_endpoint_creation_strategy = model_endpoint_creation_strategy
+        self.endpoint_type = endpoint_type
 
     def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
         self.context = context
@@ -554,7 +567,11 @@ class TaskStep(BaseStep):
 
     def _post_init(self, mode="sync"):
         if self._object and hasattr(self._object, "post_init"):
-            self._object.post_init(
+            self._object.post_init(
+                mode,
+                creation_strategy=self.model_endpoint_creation_strategy,
+                endpoint_type=self.endpoint_type,
+            )
         if hasattr(self._object, "model_endpoint_uid"):
             self.endpoint_uid = self._object.model_endpoint_uid
         if hasattr(self._object, "name"):
@@ -705,6 +722,7 @@ class RouterStep(TaskStep):
         )
         self._routes: ObjectDict = None
         self.routes = routes
+        self.endpoint_type = schemas.EndpointType.ROUTER
 
     def get_children(self):
         """get child steps (routes)"""
@@ -726,6 +744,7 @@ class RouterStep(TaskStep):
         class_name=None,
         handler=None,
         function=None,
+        creation_strategy: schemas.ModelEndpointCreationStrategy = schemas.ModelEndpointCreationStrategy.INPLACE,
         **class_args,
     ):
         """add child route step or class to the router
@@ -736,12 +755,23 @@ class RouterStep(TaskStep):
         :param class_args: class init arguments
         :param handler: class handler to invoke on run/event
         :param function: function this step should run in
+        :param creation_strategy: model endpoint creation strategy :
+            * overwrite - Create a new model endpoint and delete the last old one if it exists.
+            * inplace - Use the existing model endpoint if it already exists (default).
+            * archive - Preserve the old model endpoint and create a new one,
+              tagging it as the latest.
         """
 
         if not route and not class_name and not handler:
             raise MLRunInvalidArgumentError("route or class_name must be specified")
         if not route:
-            route = TaskStep(
+            route = TaskStep(
+                class_name,
+                class_args,
+                handler=handler,
+                model_endpoint_creation_strategy=creation_strategy,
+                endpoint_type=schemas.EndpointType.NODE_EP,
+            )
         route.function = function or route.function
 
         if len(self._routes) >= MAX_ALLOWED_STEPS:
@@ -805,6 +835,106 @@ class RouterStep(TaskStep):
         )
 
 
+class Model(storey.ParallelExecutionRunnable):
+    def load(self) -> None:
+        """Override to load model if needed."""
+        pass
+
+    def init(self):
+        self.load()
+
+    def predict(self, body: Any) -> Any:
+        """Override to implement prediction logic. If the logic requires asyncio, override predict_async() instead."""
+        return body
+
+    async def predict_async(self, body: Any) -> Any:
+        """Override to implement prediction logic if the logic requires asyncio."""
+        return body
+
+    def run(self, body: Any, path: str) -> Any:
+        return self.predict(body)
+
+    async def run_async(self, body: Any, path: str) -> Any:
+        return self.predict(body)
+
+
+class ModelSelector:
+    """Used to select which models to run on each event."""
+
+    def select(
+        self, event, available_models: list[Model]
+    ) -> Union[list[str], list[Model]]:
+        """
+        Given an event, returns a list of model names or a list of model objects to run on the event.
+        If None is returned, all models will be run.
+
+        :param event: The full event
+        :param available_models: List of available models
+        """
+        pass
+
+
+class ModelRunner(storey.ParallelExecution):
+    """
+    Runs multiple Models on each event. See ModelRunnerStep.
+
+    :param model_selector: ModelSelector instance whose select() method will be used to select models to run on each
+        event. Optional. If not passed, all models will be run.
+    """
+
+    def __init__(self, *args, model_selector: Optional[ModelSelector] = None, **kwargs):
+        super().__init__(*args, **kwargs)
+        self.model_selector = model_selector or ModelSelector()
+
+    def select_runnables(self, event):
+        models = cast(list[Model], self.runnables)
+        return self.model_selector.select(event, models)
+
+
+class ModelRunnerStep(TaskStep):
+    """
+    Runs multiple Models on each event.
+
+    example::
+
+        model_runner_step = ModelRunnerStep(name="my_model_runner")
+        model_runner_step.add_model(MyModel(name="my_model"))
+        graph.to(model_runner_step)
+
+    :param model_selector: ModelSelector instance whose select() method will be used to select models to run on each
+        event. Optional. If not passed, all models will be run.
+    """
+
+    kind = "model_runner"
+
+    def __init__(
+        self,
+        *args,
+        model_selector: Optional[Union[str, ModelSelector]] = None,
+        **kwargs,
+    ):
+        self._models = []
+        super().__init__(
+            *args,
+            class_name="mlrun.serving.ModelRunner",
+            class_args=dict(runnables=self._models, model_selector=model_selector),
+            **kwargs,
+        )
+
+    def add_model(self, model: Model) -> None:
+        """Add a Model to this ModelRunner."""
+        self._models.append(model)
+
+    def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
+        model_selector = self.class_args.get("model_selector")
+        if isinstance(model_selector, str):
+            model_selector = get_class(model_selector, namespace)()
+        self._async_object = ModelRunner(
+            self.class_args.get("runnables"),
+            model_selector=model_selector,
+        )
+
+
 class QueueStep(BaseStep):
     """queue step, implement an async queue or represent a stream"""
 
@@ -1344,8 +1474,9 @@ class FlowStep(BaseStep):
 
         if self._controller:
             if hasattr(self._controller, "terminate"):
-                self._controller.terminate()
-
+                return self._controller.terminate(wait=True)
+            else:
+                return self._controller.await_termination()
 
     def plot(self, filename=None, format=None, source=None, targets=None, **kw):
         """plot/save graph using graphviz
@@ -1433,6 +1564,7 @@ classes_map = {
     "queue": QueueStep,
     "error_step": ErrorStep,
     "monitoring_application": MonitoringApplicationStep,
+    "model_runner": ModelRunnerStep,
 }
 
 
@@ -1572,6 +1704,10 @@ def params_to_step(
     input_path: Optional[str] = None,
     result_path: Optional[str] = None,
     class_args=None,
+    model_endpoint_creation_strategy: Optional[
+        schemas.ModelEndpointCreationStrategy
+    ] = None,
+    endpoint_type: Optional[schemas.EndpointType] = None,
 ):
     """return step object from provided params or classes/objects"""
 
@@ -1587,6 +1723,9 @@ def params_to_step(
         step.full_event = full_event or step.full_event
         step.input_path = input_path or step.input_path
         step.result_path = result_path or step.result_path
+        if kind == StepKinds.task:
+            step.model_endpoint_creation_strategy = model_endpoint_creation_strategy
+            step.endpoint_type = endpoint_type
 
     elif class_name and class_name in queue_class_names:
         if "path" not in class_args:
@@ -1627,6 +1766,8 @@ def params_to_step(
             full_event=full_event,
             input_path=input_path,
             result_path=result_path,
+            model_endpoint_creation_strategy=model_endpoint_creation_strategy,
+            endpoint_type=endpoint_type,
         )
     else:
         raise MLRunInvalidArgumentError("class_name or handler must be provided")
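For orientation, here is a minimal sketch of how the new ModelRunnerStep might be wired into a serving graph, following the docstring example in the diff above. The function name, file name, and MyModel class are illustrative assumptions, not part of the package:

import mlrun
from mlrun.serving.states import Model, ModelRunnerStep


class MyModel(Model):
    def load(self) -> None:
        # load weights or other resources here (illustrative)
        self.factor = 2

    def predict(self, body):
        # body is the event body routed to this model
        return {"outputs": [x * self.factor for x in body["inputs"]]}


# assumed setup: a serving function whose code lives in model_runner_example.py
fn = mlrun.code_to_function(
    "model-runner-example", kind="serving", filename="model_runner_example.py"
)
graph = fn.set_topology("flow", engine="async")

model_runner_step = ModelRunnerStep(name="my_model_runner")
model_runner_step.add_model(MyModel(name="my_model"))
graph.to(model_runner_step).respond()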
mlrun/serving/v2_serving.py
CHANGED
@@ -23,7 +23,6 @@ import mlrun.common.schemas.model_monitoring
 import mlrun.model_monitoring
 from mlrun.utils import logger, now_date
 
-from ..common.schemas.model_monitoring import ModelEndpointSchema
 from .server import GraphServer
 from .utils import StepToDict, _extract_input_data, _update_result_body
 
@@ -130,7 +129,7 @@ class V2ModelServer(StepToDict):
         self.ready = True
         self.context.logger.info(f"model {self.name} was loaded")
 
-    def post_init(self, mode="sync"):
+    def post_init(self, mode="sync", **kwargs):
         """sync/async model loading, for internal use"""
         if not self.ready:
             if mode == "async":
@@ -149,7 +148,10 @@ class V2ModelServer(StepToDict):
 
         if not self.context.is_mock or self.context.monitoring_mock:
             self.model_endpoint_uid = _init_endpoint_record(
-                graph_server=server,
+                graph_server=server,
+                model=self,
+                creation_strategy=kwargs.get("creation_strategy"),
+                endpoint_type=kwargs.get("endpoint_type"),
             )
         self._model_logger = (
             _ModelLogPusher(self, self.context)
@@ -554,7 +556,10 @@ class _ModelLogPusher:
 
 
 def _init_endpoint_record(
-    graph_server: GraphServer,
+    graph_server: GraphServer,
+    model: V2ModelServer,
+    creation_strategy: str,
+    endpoint_type: mlrun.common.schemas.EndpointType,
 ) -> Union[str, None]:
     """
     Initialize model endpoint record and write it into the DB. In general, this method retrieve the unique model
@@ -564,6 +569,12 @@ def _init_endpoint_record(
    :param graph_server: A GraphServer object which will be used for getting the function uri.
    :param model: Base model serving class (v2). It contains important details for the model endpoint record
                  such as model name, model path, and model version.
+    :param creation_strategy: model endpoint creation strategy :
+        * overwrite - Create a new model endpoint and delete the last old one if it exists.
+        * inplace - Use the existing model endpoint if it already exists (default).
+        * archive - Preserve the old model endpoint and create a new one,
+          tagging it as the latest.
+    :param endpoint_type model endpoint type
 
     :return: Model endpoint unique ID.
     """
@@ -573,62 +584,46 @@ def _init_endpoint_record(
     model.get_model()
     if model.model_spec:
         model_name = model.model_spec.metadata.key
+        model_db_key = model.model_spec.spec.db_key
         model_uid = model.model_spec.metadata.uid
         model_tag = model.model_spec.tag
         model_labels = model.model_spec.labels  # todo : check if we still need this
     else:
         model_name = None
+        model_db_key = None
         model_uid = None
         model_tag = None
         model_labels = {}
-
-
-
-            name=model.name,
-            function_name=graph_server.function_name,
-            function_tag=graph_server.function_tag or "latest",
-        )
-    except mlrun.errors.MLRunNotFoundError:
-        model_ep = None
-    except mlrun.errors.MLRunBadRequestError as err:
-        logger.info(
-            "Cannot get the model endpoints store", err=mlrun.errors.err_to_str(err)
-        )
-        return
-
-    function = mlrun.get_run_db().get_function(
-        name=graph_server.function_name,
+    logger.info(
+        "Creating Or Updating a new model endpoint record",
+        name=model.name,
         project=graph_server.project,
-
+        function_name=graph_server.function_name,
+        function_tag=graph_server.function_tag or "latest",
+        model_name=model_name,
+        model_tag=model_tag,
+        model_db_key=model_db_key,
+        model_uid=model_uid,
+        model_class=model.__class__.__name__,
+        creation_strategy=creation_strategy,
+        endpoint_type=endpoint_type,
     )
-
-    if not model_ep and model.context.server.track_models:
-        logger.info(
-            "Creating a new model endpoint record",
-            name=model.name,
-            project=graph_server.project,
-            function_name=graph_server.function_name,
-            function_tag=graph_server.function_tag or "latest",
-            function_uid=function_uid,
-            model_name=model_name,
-            model_uid=model_uid,
-            model_class=model.__class__.__name__,
-            model_tag=model_tag,
-        )
+    try:
         model_ep = mlrun.common.schemas.ModelEndpoint(
             metadata=mlrun.common.schemas.ModelEndpointMetadata(
                 project=graph_server.project,
                 labels=model_labels,
                 name=model.name,
-                endpoint_type=
+                endpoint_type=endpoint_type,
             ),
             spec=mlrun.common.schemas.ModelEndpointSpec(
                 function_name=graph_server.function_name,
-                function_uid=function_uid,
                 function_tag=graph_server.function_tag or "latest",
                 model_name=model_name,
+                model_db_key=model_db_key,
                 model_uid=model_uid,
                 model_class=model.__class__.__name__,
+                model_tag=model_tag,
            ),
            status=mlrun.common.schemas.ModelEndpointStatus(
                monitoring_mode=mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
@@ -637,47 +632,11 @@ def _init_endpoint_record(
            ),
        )
        db = mlrun.get_run_db()
-        model_ep = db.create_model_endpoint(
-
-
-
-
-            attributes[ModelEndpointSchema.FUNCTION_UID] = function_uid
-        if model_name != model_ep.spec.model_name:
-            attributes[ModelEndpointSchema.MODEL_NAME] = model_name
-        if model_uid != model_ep.spec.model_uid:
-            attributes[ModelEndpointSchema.MODEL_UID] = model_uid
-        if model_tag != model_ep.spec.model_tag:
-            attributes[ModelEndpointSchema.MODEL_TAG] = model_tag
-        if model_labels != model_ep.metadata.labels:
-            attributes[ModelEndpointSchema.LABELS] = model_labels
-        if model.__class__.__name__ != model_ep.spec.model_class:
-            attributes[ModelEndpointSchema.MODEL_CLASS] = model.__class__.__name__
-        if (
-            model_ep.status.monitoring_mode
-            == mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
-        ) != model.context.server.track_models:
-            attributes[ModelEndpointSchema.MONITORING_MODE] = (
-                mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
-                if model.context.server.track_models
-                else mlrun.common.schemas.model_monitoring.ModelMonitoringMode.disabled
-            )
-        if attributes:
-            logger.info(
-                "Updating model endpoint attributes",
-                attributes=attributes,
-                project=model_ep.metadata.project,
-                name=model_ep.metadata.name,
-                function_name=model_ep.spec.function_name,
-            )
-            db = mlrun.get_run_db()
-            model_ep = db.patch_model_endpoint(
-                project=model_ep.metadata.project,
-                name=model_ep.metadata.name,
-                endpoint_id=model_ep.metadata.uid,
-                attributes=attributes,
-            )
-    else:
+        model_ep = db.create_model_endpoint(
+            model_endpoint=model_ep, creation_strategy=creation_strategy
+        )
+    except mlrun.errors.MLRunBadRequestError as e:
+        logger.info("Failed to create model endpoint record", error=e)
         return None
 
     return model_ep.metadata.uid
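The creation strategy that post_init() and _init_endpoint_record() receive above originates in the graph definition. A hedged sketch of how a router route might opt into a specific strategy through the new add_route parameter; the serving function, model class, and model path are assumptions for illustration, and the ARCHIVE member name follows the uppercase style of INPLACE used in the diff:

import mlrun
import mlrun.common.schemas as schemas

fn = mlrun.code_to_function("serving-example", kind="serving", filename="my_models.py")
router = fn.set_topology("router")

# add_route() now forwards creation_strategy (and an endpoint type) to the TaskStep,
# which passes them on to V2ModelServer.post_init() -> _init_endpoint_record()
router.add_route(
    "my-model",
    class_name="MyModelServer",
    model_path="store://models/my-project/my-model:latest",
    creation_strategy=schemas.ModelEndpointCreationStrategy.ARCHIVE,
)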
mlrun/utils/helpers.py
CHANGED
@@ -111,15 +111,21 @@ def get_artifact_target(item: dict, project=None):
     project_str = project or item["metadata"].get("project")
     tree = item["metadata"].get("tree")
     tag = item["metadata"].get("tag")
+    iter = item["metadata"].get("iter")
     kind = item.get("kind")
+    uid = item["metadata"].get("uid")
 
     if kind in {"dataset", "model", "artifact"} and db_key:
         target = (
             f"{DB_SCHEMA}://{StorePrefix.kind_to_prefix(kind)}/{project_str}/{db_key}"
         )
+        if iter:
+            target = f"{target}#{iter}"
         target += f":{tag}" if tag else ":latest"
         if tree:
             target += f"@{tree}"
+        if uid:
+            target += f"^{uid}"
         return target
 
     return item["spec"].get("target_path")
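To illustrate the change above, a small sketch of the store URI that get_artifact_target() would now produce; the field values are made up, and the "store://models/..." prefix assumes StorePrefix.kind_to_prefix("model") resolves to "models":

from mlrun.utils.helpers import get_artifact_target

# Illustrative artifact dict; field names follow the code in the diff above.
item = {
    "kind": "model",
    "metadata": {
        "project": "my-proj",
        "tree": "abc123",
        "tag": "v1",
        "iter": 2,
        "uid": "0a1b2c",
    },
    "spec": {"db_key": "my-model", "target_path": "s3://bucket/path"},
}

# iteration is appended with '#', tag with ':', producer tree with '@',
# and (new in this version) the artifact uid with '^':
# store://models/my-proj/my-model#2:v1@abc123^0a1b2c
print(get_artifact_target(item))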
{mlrun-1.8.0rc9.dist-info → mlrun-1.8.0rc12.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlrun
-Version: 1.8.0rc9
+Version: 1.8.0rc12
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -44,7 +44,7 @@ Requires-Dist: semver~=3.0
 Requires-Dist: dependency-injector~=4.41
 Requires-Dist: fsspec<2024.7,>=2023.9.2
 Requires-Dist: v3iofs~=0.1.17
-Requires-Dist: storey~=1.8.
+Requires-Dist: storey~=1.8.6
 Requires-Dist: inflection~=0.5.0
 Requires-Dist: python-dotenv~=1.0
 Requires-Dist: setuptools>=75.2
@@ -103,16 +103,16 @@ Requires-Dist: taoswswrap~=0.2.0; extra == "tdengine"
 Provides-Extra: snowflake
 Requires-Dist: snowflake-connector-python~=3.7; extra == "snowflake"
 Provides-Extra: api
-Requires-Dist: uvicorn~=0.
+Requires-Dist: uvicorn~=0.32.1; extra == "api"
 Requires-Dist: dask-kubernetes~=0.11.0; extra == "api"
 Requires-Dist: apscheduler<4,>=3.11; extra == "api"
 Requires-Dist: objgraph~=3.6; extra == "api"
 Requires-Dist: igz-mgmt~=0.4.1; extra == "api"
 Requires-Dist: humanfriendly~=10.0; extra == "api"
-Requires-Dist: fastapi~=0.
+Requires-Dist: fastapi~=0.115.6; extra == "api"
 Requires-Dist: sqlalchemy~=1.4; extra == "api"
-Requires-Dist: pymysql~=1.
-Requires-Dist: alembic~=1.
+Requires-Dist: pymysql~=1.1; extra == "api"
+Requires-Dist: alembic~=1.14; extra == "api"
 Requires-Dist: timelength~=1.1; extra == "api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
 Requires-Dist: aiosmtplib~=3.0; extra == "api"
@@ -181,7 +181,7 @@ Provides-Extra: complete-api
 Requires-Dist: adlfs==2023.9.0; extra == "complete-api"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete-api"
 Requires-Dist: aiosmtplib~=3.0; extra == "complete-api"
-Requires-Dist: alembic~=1.
+Requires-Dist: alembic~=1.14; extra == "complete-api"
 Requires-Dist: apscheduler<4,>=3.11; extra == "complete-api"
 Requires-Dist: avro~=1.11; extra == "complete-api"
 Requires-Dist: azure-core~=1.24; extra == "complete-api"
@@ -192,7 +192,7 @@ Requires-Dist: dask-kubernetes~=0.11.0; extra == "complete-api"
 Requires-Dist: dask~=2023.12.1; extra == "complete-api"
 Requires-Dist: databricks-sdk~=0.13.0; extra == "complete-api"
 Requires-Dist: distributed~=2023.12.1; extra == "complete-api"
-Requires-Dist: fastapi~=0.
+Requires-Dist: fastapi~=0.115.6; extra == "complete-api"
 Requires-Dist: gcsfs<2024.7,>=2023.9.2; extra == "complete-api"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "complete-api"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "complete-api"
@@ -209,7 +209,7 @@ Requires-Dist: objgraph~=3.6; extra == "complete-api"
 Requires-Dist: oss2==2.18.1; extra == "complete-api"
 Requires-Dist: ossfs==2023.12.0; extra == "complete-api"
 Requires-Dist: plotly~=5.23; extra == "complete-api"
-Requires-Dist: pymysql~=1.
+Requires-Dist: pymysql~=1.1; extra == "complete-api"
 Requires-Dist: pyopenssl>=23; extra == "complete-api"
 Requires-Dist: redis~=4.3; extra == "complete-api"
 Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete-api"
@@ -218,7 +218,7 @@ Requires-Dist: sqlalchemy~=1.4; extra == "complete-api"
 Requires-Dist: taos-ws-py==0.3.2; extra == "complete-api"
 Requires-Dist: taoswswrap~=0.2.0; extra == "complete-api"
 Requires-Dist: timelength~=1.1; extra == "complete-api"
-Requires-Dist: uvicorn~=0.
+Requires-Dist: uvicorn~=0.32.1; extra == "complete-api"
 
 <a id="top"></a>
 [](https://github.com/mlrun/mlrun/actions/workflows/build.yaml?query=branch%3Adevelopment)