mlrun 1.7.0rc21-py3-none-any.whl → 1.7.0rc22-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/alerts/alert.py +42 -17
- mlrun/config.py +3 -0
- mlrun/execution.py +2 -0
- mlrun/frameworks/__init__.py +6 -0
- mlrun/launcher/local.py +4 -0
- mlrun/launcher/remote.py +1 -0
- mlrun/model.py +2 -0
- mlrun/model_monitoring/api.py +1 -0
- mlrun/model_monitoring/applications/base.py +3 -3
- mlrun/model_monitoring/db/tsdb/base.py +0 -13
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +22 -18
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +66 -44
- mlrun/model_monitoring/helpers.py +17 -0
- mlrun/package/__init__.py +13 -1
- mlrun/package/packagers/__init__.py +6 -1
- mlrun/projects/operations.py +5 -0
- mlrun/projects/project.py +6 -0
- mlrun/runtimes/base.py +6 -0
- mlrun/runtimes/daskjob.py +1 -0
- mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
- mlrun/runtimes/local.py +7 -1
- mlrun/serving/__init__.py +8 -1
- mlrun/serving/states.py +51 -8
- mlrun/serving/utils.py +19 -11
- mlrun/utils/helpers.py +51 -9
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/METADATA +1 -1
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/RECORD +32 -32
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/top_level.txt +0 -0
mlrun/alerts/alert.py
CHANGED
@@ -30,6 +30,11 @@ class AlertConfig(ModelObj):
         "reset_policy",
         "state",
     ]
+    _fields_to_serialize = ModelObj._fields_to_serialize + [
+        "entities",
+        "notifications",
+        "trigger",
+    ]
 
     def __init__(
         self,
@@ -71,24 +76,44 @@ class AlertConfig(ModelObj):
         if not self.project or not self.name:
             raise mlrun.errors.MLRunBadRequestError("Project and name must be provided")
 
+    def _serialize_field(
+        self, struct: dict, field_name: str = None, strip: bool = False
+    ):
+        if field_name == "entities":
+            if self.entities:
+                return (
+                    self.entities.dict()
+                    if not isinstance(self.entities, dict)
+                    else self.entities
+                )
+            return None
+        if field_name == "notifications":
+            if self.notifications:
+                return [
+                    notification_data.dict()
+                    if not isinstance(notification_data, dict)
+                    else notification_data
+                    for notification_data in self.notifications
+                ]
+            return None
+        if field_name == "trigger":
+            if self.trigger:
+                return (
+                    self.trigger.dict()
+                    if not isinstance(self.trigger, dict)
+                    else self.trigger
+                )
+            return None
+        return super()._serialize_field(struct, field_name, strip)
+
     def to_dict(self, fields: list = None, exclude: list = None, strip: bool = False):
-        data["notifications"] = [
-            notification_data.dict()
-            if not isinstance(notification_data, dict)
-            else notification_data
-            for notification_data in self.notifications
-        ]
-        data["trigger"] = (
-            self.trigger.dict() if not isinstance(self.trigger, dict) else self.trigger
-        )
-        return data
+        if self.entities is None:
+            raise mlrun.errors.MLRunBadRequestError("Alert entity field is missing")
+        if not self.notifications:
+            raise mlrun.errors.MLRunBadRequestError(
+                "Alert must have at least one notification"
+            )
+        return super().to_dict(self._dict_fields)
 
     @classmethod
     def from_dict(cls, struct=None, fields=None, deprecated_fields: dict = None):
mlrun/config.py
CHANGED
@@ -229,6 +229,9 @@ default_config = {
                 "executing": "24h",
             }
         },
+        # When the module is reloaded, the maximum depth recursion configuration for the recursive reload
+        # function is used to prevent infinite loop
+        "reload_max_recursion_depth": 100,
    },
    "databricks": {
        "artifact_directory_path": "/mlrun_databricks_runtime/artifacts_dictionaries"
mlrun/execution.py
CHANGED
@@ -111,6 +111,7 @@ class MLClientCtx:
 
         self._project_object = None
         self._allow_empty_resources = None
+        self._reset_on_run = None
 
     def __enter__(self):
         return self
@@ -389,6 +390,7 @@ class MLClientCtx:
             self._state_thresholds = spec.get(
                 "state_thresholds", self._state_thresholds
             )
+            self._reset_on_run = spec.get("reset_on_run", self._reset_on_run)
 
         self._init_dbs(rundb)
 
mlrun/frameworks/__init__.py
CHANGED
@@ -12,5 +12,11 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+"""
+MLRun provides a quick and easy integration into your code with mlrun.frameworks: a collection of sub-modules
+for the most commonly used machine and deep learning frameworks, providing features such as automatic logging,
+model management, and distributed training.
+"""
+
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
 from .parallel_coordinates import compare_db_runs, compare_run_objects
mlrun/launcher/local.py
CHANGED
@@ -69,6 +69,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
     ) -> "mlrun.run.RunObject":
         # do not allow local function to be scheduled
         if self._is_run_local and schedule is not None:
@@ -88,6 +89,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
             name=name,
             workdir=workdir,
             handler=handler,
+            reset_on_run=reset_on_run,
         )
 
         # sanity check
@@ -212,6 +214,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
         name: Optional[str] = "",
         workdir: Optional[str] = "",
         handler: Optional[str] = None,
+        reset_on_run: Optional[bool] = None,
     ):
         project = project or runtime.metadata.project
         function_name = name or runtime.metadata.name
@@ -250,6 +253,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
             fn.spec.build = runtime.spec.build
 
         run.spec.handler = handler
+        run.spec.reset_on_run = reset_on_run
         return fn
 
     @staticmethod
mlrun/launcher/remote.py
CHANGED
@@ -59,6 +59,7 @@ class ClientRemoteLauncher(launcher.ClientBaseLauncher):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
     ) -> "mlrun.run.RunObject":
         self.enrich_runtime(runtime, project)
         run = self._create_run_object(task)
mlrun/model.py
CHANGED
@@ -872,6 +872,7 @@ class RunSpec(ModelObj):
         returns=None,
         notifications=None,
         state_thresholds=None,
+        reset_on_run=None,
     ):
         # A dictionary of parsing configurations that will be read from the inputs the user set. The keys are the inputs
         # keys (parameter names) and the values are the type hint given in the input keys after the colon.
@@ -908,6 +909,7 @@ class RunSpec(ModelObj):
         self.allow_empty_resources = allow_empty_resources
         self._notifications = notifications or []
         self.state_thresholds = state_thresholds or {}
+        self.reset_on_run = reset_on_run
 
     def _serialize_field(
         self, struct: dict, field_name: str = None, strip: bool = False
mlrun/model_monitoring/api.py
CHANGED
@@ -645,6 +645,7 @@ def _create_model_monitoring_function_base(
         app_step = prepare_step.to(class_name=application_class, **application_kwargs)
     else:
         app_step = prepare_step.to(class_name=application_class)
+    app_step.__class__ = mlrun.serving.MonitoringApplicationStep
     app_step.to(
         class_name="mlrun.model_monitoring.applications._application_steps._PushToMonitoringWriter",
         name="PushToMonitoringWriter",
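The added line retypes an already-built step by reassigning its __class__, so the existing instance picks up the MonitoringApplicationStep kind without being rebuilt. A tiny generic sketch of what that reassignment does is below; the classes are illustrative, not MLRun's real step classes.

# Illustrative sketch of __class__ reassignment: the instance keeps its state but
# takes on the subclass's attributes and behavior.
class TaskStepSketch:
    kind = "task"

class MonitoringApplicationStepSketch(TaskStepSketch):
    kind = "monitoring_application"

step = TaskStepSketch()
step.__class__ = MonitoringApplicationStepSketch
print(step.kind)  # -> "monitoring_application"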
mlrun/model_monitoring/applications/base.py
CHANGED
@@ -21,10 +21,10 @@ import pandas as pd
 import mlrun
 import mlrun.model_monitoring.applications.context as mm_context
 import mlrun.model_monitoring.applications.results as mm_results
-from mlrun.serving.utils import StepToDict
+from mlrun.serving.utils import MonitoringApplicationToDict
 
 
-class ModelMonitoringApplicationBaseV2(StepToDict, ABC):
+class ModelMonitoringApplicationBaseV2(MonitoringApplicationToDict, ABC):
     """
     A base class for a model monitoring application.
     Inherit from this class to create a custom model monitoring application.
@@ -112,7 +112,7 @@ class ModelMonitoringApplicationBaseV2(StepToDict, ABC):
         raise NotImplementedError
 
 
-class ModelMonitoringApplicationBase(StepToDict, ABC):
+class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
     """
     A base class for a model monitoring application.
     Inherit from this class to create a custom model monitoring application.
mlrun/model_monitoring/db/tsdb/base.py
CHANGED
@@ -177,19 +177,6 @@ class TSDBConnector(ABC):
         :return: Metric values object or no data object.
         """
 
-    @abstractmethod
-    def read_prediction_metric_for_endpoint_if_exists(
-        self, endpoint_id: str
-    ) -> typing.Optional[mm_schemas.ModelEndpointMonitoringMetric]:
-        """
-        Read the "invocations" metric for the provided model endpoint, and return the metric object
-        if it exists.
-
-        :param endpoint_id: The model endpoint identifier.
-        :return: `None` if the invocations metric does not exist, otherwise return the
-                 corresponding metric object.
-        """
-
     @staticmethod
     def df_to_metrics_values(
         *,
mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py
CHANGED
@@ -377,21 +377,25 @@ class TDEngineConnector(TSDBConnector):
         ),  # pyright: ignore[reportArgumentType]
     )
 
+    # Note: this function serves as a reference for checking the TSDB for the existence of a metric.
+    #
+    # def read_prediction_metric_for_endpoint_if_exists(
+    #     self, endpoint_id: str
+    # ) -> typing.Optional[mm_schemas.ModelEndpointMonitoringMetric]:
+    #     """
+    #     Read the "invocations" metric for the provided model endpoint, and return the metric object
+    #     if it exists.
+    #
+    #     :param endpoint_id: The model endpoint identifier.
+    #     :return: `None` if the invocations metric does not exist, otherwise return the
+    #              corresponding metric object.
+    #     """
+    #     # Read just one record, because we just want to check if there is any data for this endpoint_id
+    #     predictions = self.read_predictions(
+    #         endpoint_id=endpoint_id,
+    #         start=datetime.min,
+    #         end=mlrun.utils.now_date(),
+    #         limit=1,
+    #     )
+    #     if predictions:
+    #         return get_invocations_metric(self.project)
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py
CHANGED
@@ -12,15 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import typing
 from datetime import datetime
 from io import StringIO
 from typing import Literal, Optional, Union
 
 import pandas as pd
+import v3io_frames
 import v3io_frames.client
-import v3io_frames.errors
-from v3io_frames.frames_pb2 import IGNORE
 
 import mlrun.common.model_monitoring
 import mlrun.common.schemas.model_monitoring as mm_schemas
@@ -35,6 +33,14 @@ _TSDB_RATE = "1/s"
 _CONTAINER = "users"
 
 
+def _is_no_schema_error(exc: v3io_frames.ReadError) -> bool:
+    """
+    In case of a nonexistent TSDB table - a `v3io_frames.ReadError` error is raised.
+    Check if the error message contains the relevant string to verify the cause.
+    """
+    return "No TSDB schema file found" in str(exc)
+
+
 class V3IOTSDBConnector(TSDBConnector):
     """
     Handles the TSDB operations when the TSDB connector is of type V3IO. To manage these operations we use V3IO Frames
@@ -47,7 +53,7 @@ class V3IOTSDBConnector(TSDBConnector):
         self,
         project: str,
         container: str = _CONTAINER,
-        v3io_framesd: typing.Optional[str] = None,
+        v3io_framesd: Optional[str] = None,
         create_table: bool = False,
     ) -> None:
         super().__init__(project=project)
@@ -132,7 +138,7 @@ class V3IOTSDBConnector(TSDBConnector):
             self._frames_client.create(
                 backend=_TSDB_BE,
                 table=table,
-                if_exists=IGNORE,
+                if_exists=v3io_frames.IGNORE,
                 rate=_TSDB_RATE,
             )
 
@@ -162,7 +168,7 @@ class V3IOTSDBConnector(TSDBConnector):
             time_col=mm_schemas.EventFieldType.TIMESTAMP,
             container=self.container,
             v3io_frames=self.v3io_framesd,
-            columns=[
+            columns=[mm_schemas.EventFieldType.LATENCY],
             index_cols=[
                 mm_schemas.EventFieldType.ENDPOINT_ID,
             ],
@@ -280,7 +286,7 @@ class V3IOTSDBConnector(TSDBConnector):
                 index_cols=index_cols,
             )
             logger.info("Updated V3IO TSDB successfully", table=table)
-        except v3io_frames.errors.Error as err:
+        except v3io_frames.Error as err:
            logger.exception(
                 "Could not write drift measures to TSDB",
                 err=err,
@@ -291,7 +297,7 @@ class V3IOTSDBConnector(TSDBConnector):
                 f"Failed to write application result to TSDB: {err}"
             )
 
-    def delete_tsdb_resources(self, table: typing.Optional[str] = None):
+    def delete_tsdb_resources(self, table: Optional[str] = None):
         if table:
             # Delete a specific table
             tables = [table]
@@ -301,7 +307,7 @@ class V3IOTSDBConnector(TSDBConnector):
         for table_to_delete in tables:
             try:
                 self._frames_client.delete(backend=_TSDB_BE, table=table_to_delete)
-            except v3io_frames.errors.DeleteError as e:
+            except v3io_frames.DeleteError as e:
                 logger.warning(
                     f"Failed to delete TSDB table '{table}'",
                     err=mlrun.errors.err_to_str(e),
@@ -362,7 +368,7 @@ class V3IOTSDBConnector(TSDBConnector):
                 ]
                 metrics_mapping[metric] = values
 
-        except v3io_frames.errors.Error as err:
+        except v3io_frames.Error as err:
             logger.warn("Failed to read tsdb", err=err, endpoint=endpoint_id)
 
         return metrics_mapping
@@ -372,12 +378,11 @@ class V3IOTSDBConnector(TSDBConnector):
         table: str,
         start: Union[datetime, str],
         end: Union[datetime, str],
-        columns: typing.Optional[list[str]] = None,
+        columns: Optional[list[str]] = None,
         filter_query: str = "",
-        interval: typing.Optional[str] = None,
-        agg_funcs: typing.Optional[list[str]] = None,
-        limit: typing.Optional[int] = None,
-        sliding_window_step: typing.Optional[str] = None,
+        interval: Optional[str] = None,
+        agg_funcs: Optional[list[str]] = None,
+        sliding_window_step: Optional[str] = None,
         **kwargs,
     ) -> pd.DataFrame:
         """
@@ -400,7 +405,6 @@ class V3IOTSDBConnector(TSDBConnector):
        :param agg_funcs:           The aggregation functions to apply on the columns. Note that if `agg_funcs` is
                                    provided, `interval` must be provided as well. Provided as a list of strings in
                                    the format of ['sum', 'avg', 'count', ...].
-       :param limit:               The maximum number of records to return.
        :param sliding_window_step: The time step for which the time window moves forward. Note that if
                                    `sliding_window_step` is provided, interval must be provided as well. Provided
                                    as a string in the format of '1m', '1h', etc.
@@ -416,7 +420,7 @@ class V3IOTSDBConnector(TSDBConnector):
 
        if agg_funcs:
            # Frames client expects the aggregators to be a comma-separated string
+           aggregators = ",".join(agg_funcs)
        table_path = self.tables[table]
        try:
            df = self._frames_client.read(
@@ -427,18 +431,16 @@ class V3IOTSDBConnector(TSDBConnector):
                columns=columns,
                filter=filter_query,
                aggregation_window=interval,
-               aggregators=
+               aggregators=aggregators,
                step=sliding_window_step,
                **kwargs,
            )
        except v3io_frames.ReadError as err:
-           if "No TSDB schema file found" in str(err):
+           if _is_no_schema_error(err):
                return pd.DataFrame()
            else:
                raise err
 
-       if limit:
-           df = df.head(limit)
        return df
 
    def _get_v3io_source_directory(self) -> str:
@@ -509,8 +511,8 @@ class V3IOTSDBConnector(TSDBConnector):
            raise ValueError(f"Invalid {type = }")
 
        query = self._get_sql_query(
-           endpoint_id,
-           [(metric.app, metric.name) for metric in metrics],
+           endpoint_id=endpoint_id,
+           metric_and_app_names=[(metric.app, metric.name) for metric in metrics],
            table_path=table_path,
            name=name,
        )
@@ -536,21 +538,28 @@ class V3IOTSDBConnector(TSDBConnector):
 
    @staticmethod
    def _get_sql_query(
+       *,
        endpoint_id: str,
-       names: list[tuple[str, str]],
        table_path: str,
        name: str = mm_schemas.ResultData.RESULT_NAME,
+       metric_and_app_names: Optional[list[tuple[str, str]]] = None,
+       columns: Optional[list[str]] = None,
    ) -> str:
        """Get the SQL query for the results/metrics table"""
+       if columns:
+           selection = ",".join(columns)
+       else:
+           selection = "*"
+
        with StringIO() as query:
            query.write(
-               f"SELECT * FROM '{table_path}' "
+               f"SELECT {selection} FROM '{table_path}' "
                f"WHERE {mm_schemas.WriterEvent.ENDPOINT_ID}='{endpoint_id}'"
            )
-           if names:
+           if metric_and_app_names:
                query.write(" AND (")
 
-               for i, (app_name, result_name) in enumerate(names):
+               for i, (app_name, result_name) in enumerate(metric_and_app_names):
                    sub_cond = (
                        f"({mm_schemas.WriterEvent.APPLICATION_NAME}='{app_name}' "
                        f"AND {name}='{result_name}')"
@@ -572,7 +581,6 @@ class V3IOTSDBConnector(TSDBConnector):
        end: Union[datetime, str],
        aggregation_window: Optional[str] = None,
        agg_funcs: Optional[list[str]] = None,
-       limit: Optional[int] = None,
    ) -> Union[
        mm_schemas.ModelEndpointMonitoringMetricNoData,
        mm_schemas.ModelEndpointMonitoringMetricValues,
@@ -591,7 +599,6 @@ class V3IOTSDBConnector(TSDBConnector):
            filter_query=f"endpoint_id=='{endpoint_id}'",
            interval=aggregation_window,
            agg_funcs=agg_funcs,
-           limit=limit,
            sliding_window_step=aggregation_window,
        )
 
@@ -619,18 +626,33 @@ class V3IOTSDBConnector(TSDBConnector):
        ),  # pyright: ignore[reportArgumentType]
    )
 
+    # Note: this function serves as a reference for checking the TSDB for the existence of a metric.
+    #
+    # def read_prediction_metric_for_endpoint_if_exists(
+    #     self, endpoint_id: str
+    # ) -> Optional[mm_schemas.ModelEndpointMonitoringMetric]:
+    #     """
+    #     Read the count of the latency column in the predictions table for the given endpoint_id.
+    #     We just want to check if there is any data for this endpoint_id.
+    #     """
+    #     query = self._get_sql_query(
+    #         endpoint_id=endpoint_id,
+    #         table_path=self.tables[mm_schemas.FileTargetKind.PREDICTIONS],
+    #         columns=[f"count({mm_schemas.EventFieldType.LATENCY})"],
+    #     )
+    #     try:
+    #         logger.debug("Checking TSDB", project=self.project, query=query)
+    #         df: pd.DataFrame = self._frames_client.read(
+    #             backend=_TSDB_BE, query=query, start="0", end="now"
+    #         )
+    #     except v3io_frames.ReadError as err:
+    #         if _is_no_schema_error(err):
+    #             logger.debug(
+    #                 "No predictions yet", project=self.project, endpoint_id=endpoint_id
+    #             )
+    #             return
+    #         else:
+    #             raise
+    #
+    #     if not df.empty:
+    #         return get_invocations_metric(self.project)
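The _get_sql_query refactor above switches to keyword-only arguments and adds an optional columns selection. A standalone sketch of that query-builder shape follows; the column and field names (endpoint_id, application_name, result_name) are illustrative placeholders rather than the exact mm_schemas constants.

from io import StringIO
from typing import Optional

# Sketch of a keyword-only SQL query builder with an optional column selection.
def build_query(
    *,
    endpoint_id: str,
    table_path: str,
    metric_and_app_names: Optional[list] = None,
    columns: Optional[list] = None,
) -> str:
    selection = ",".join(columns) if columns else "*"
    with StringIO() as query:
        query.write(f"SELECT {selection} FROM '{table_path}' WHERE endpoint_id='{endpoint_id}'")
        if metric_and_app_names:
            conditions = [
                f"(application_name='{app}' AND result_name='{name}')"
                for app, name in metric_and_app_names
            ]
            query.write(" AND (" + " OR ".join(conditions) + ")")
        query.write(";")
        return query.getvalue()


print(build_query(endpoint_id="ep-1", table_path="pipelines/metrics", columns=["count(latency)"]))
print(build_query(endpoint_id="ep-1", table_path="pipelines/app-results",
                  metric_and_app_names=[("my-app", "drift")]))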
mlrun/model_monitoring/helpers.py
CHANGED
@@ -25,6 +25,7 @@ from mlrun.common.schemas.model_monitoring import (
     EventFieldType,
 )
 from mlrun.common.schemas.model_monitoring.model_endpoints import (
+    ModelEndpointMonitoringMetric,
     ModelEndpointMonitoringMetricType,
     _compose_full_name,
 )
@@ -305,3 +306,19 @@ def get_invocations_fqn(project: str) -> str:
         name=mm_constants.PredictionsQueryConstants.INVOCATIONS,
         type=ModelEndpointMonitoringMetricType.METRIC,
     )
+
+
+def get_invocations_metric(project: str) -> ModelEndpointMonitoringMetric:
+    """
+    Return the invocations metric of any model endpoint in the given project.
+
+    :param project: The project name.
+    :returns:       The model monitoring metric object.
+    """
+    return ModelEndpointMonitoringMetric(
+        project=project,
+        app=mm_constants.SpecialApps.MLRUN_INFRA,
+        type=ModelEndpointMonitoringMetricType.METRIC,
+        name=mm_constants.PredictionsQueryConstants.INVOCATIONS,
+        full_name=get_invocations_fqn(project),
+    )
mlrun/package/__init__.py
CHANGED
@@ -12,7 +12,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+"""
+MLRun package enables fully-automated experiment and pipeline tracking and reproducibility, and easy passing of
+python objects between remote jobs, while not requiring any form of editing to the actual function original code.
+Simply set the function code in a project and run it, MLRun takes care of the rest.
+
+MLRun uses packagers: classes that perform 2 tasks:
+
+#. **Parsing inputs** - automatically cast the runtime's inputs (user's input passed to the function via
+   the ``inputs`` parameter of the ``run`` method) to the relevant hinted type. (Does not require handling of data items.)
+#. **Logging outputs** - automatically save, log, and upload the function's returned objects by the provided
+   log hints (user's input passed to the function via the ``returns`` parameter of the ``run`` method).
+   (Does not require handling of files and artifacts.)
+"""
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
 
 import functools
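The new module docstring describes what packagers do with the inputs and returns parameters of a run. A hedged sketch of the kind of handler they operate on is below; the type hint drives input parsing and the returns log hint drives output logging. The function name, dataset URI, and function reference are illustrative, and the commented run call assumes an existing MLRun project.

# Sketch of a handler that MLRun packagers would parse inputs for and log outputs from.
import pandas as pd

def summarize(df: pd.DataFrame) -> dict:
    # the pandas packager casts the "df" input (a data item URI) into a DataFrame
    return {"rows": len(df), "columns": list(df.columns)}

# Typical invocation (requires an MLRun project; shown for illustration only):
# run = project.run_function(
#     "summarize-fn",
#     inputs={"df": "store://datasets/my-project/my-data"},
#     returns=["summary"],   # the dict returned above is logged under this key
# )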
mlrun/package/packagers/__init__.py
CHANGED
@@ -12,7 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
+r"""
+MLRun comes with the following list of modules, out of the box. All of the packagers listed here
+use the implementation of :ref:`DefaultPackager <mlrun.package.packagers.default\_packager.DefaultPackager>` and are
+available by default at the start of each run.
+"""
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
+
 from .default_packager import DefaultPackager
 from .numpy_packagers import NumPySupportedFormat
mlrun/projects/operations.py
CHANGED
@@ -77,6 +77,7 @@ def run_function(
     notifications: list[mlrun.model.Notification] = None,
     returns: Optional[list[Union[str, dict[str, str]]]] = None,
     builder_env: Optional[list] = None,
+    reset_on_run: Optional[bool] = None,
 ) -> Union[mlrun.model.RunObject, PipelineNodeWrapper]:
     """Run a local or remote task as part of a local/kubeflow pipeline
 
@@ -167,6 +168,9 @@ def run_function(
                             artifact type can be given there. The artifact key must appear in the dictionary as
                             "key": "the_key".
     :param builder_env:     env vars dict for source archive config/credentials e.g. builder_env={"GIT_TOKEN": token}
+    :param reset_on_run:    When True, function python modules will reload prior to code execution.
+                            This ensures the latest code changes are executed. This argument must be used in
+                            conjunction with the local=True argument.
     :return: MLRun RunObject or PipelineNodeWrapper
     """
     engine, function = _get_engine_and_function(function, project_object)
@@ -215,6 +219,7 @@ def run_function(
         schedule=schedule,
         notifications=notifications,
         builder_env=builder_env,
+        reset_on_run=reset_on_run,
     )
     if run_result:
         run_result._notified = False
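The docstring above states that reset_on_run only takes effect together with local=True. A hedged usage sketch follows; the project name, file name, and handler are illustrative, and the calls assume the usual mlrun project API (get_or_create_project, set_function, run_function).

import mlrun

# Sketch of using the new reset_on_run flag with a local run.
project = mlrun.get_or_create_project("reload-demo", context="./")
fn = project.set_function(
    "trainer.py", name="trainer", kind="job", image="mlrun/mlrun", handler="train"
)

# A first local run imports trainer.py and its modules into this Python process.
project.run_function(fn, local=True)

# After editing trainer.py (or a module it imports), reset_on_run=True reloads those
# modules before executing, so the edited code runs without restarting the kernel.
project.run_function(fn, local=True, reset_on_run=True)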
mlrun/projects/project.py
CHANGED
@@ -3258,6 +3258,7 @@ class MlrunProject(ModelObj):
         notifications: list[mlrun.model.Notification] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         builder_env: Optional[dict] = None,
+        reset_on_run: bool = None,
     ) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
         """Run a local or remote task as part of a local/kubeflow pipeline
 
@@ -3314,6 +3315,10 @@ class MlrunProject(ModelObj):
                                 artifact type can be given there. The artifact key must appear in the dictionary as
                                 "key": "the_key".
         :param builder_env:     env vars dict for source archive config/credentials e.g. builder_env={"GIT_TOKEN": token}
+        :param reset_on_run:    When True, function python modules will reload prior to code execution.
+                                This ensures the latest code changes are executed. This argument must be used in
+                                conjunction with the local=True argument.
+
         :return: MLRun RunObject or PipelineNodeWrapper
         """
         return run_function(
@@ -3339,6 +3344,7 @@ class MlrunProject(ModelObj):
             notifications=notifications,
             returns=returns,
             builder_env=builder_env,
+            reset_on_run=reset_on_run,
         )
 
     def build_function(
mlrun/runtimes/base.py
CHANGED
@@ -68,6 +68,7 @@ spec_fields = [
     "disable_auto_mount",
     "allow_empty_resources",
     "clone_target_dir",
+    "reset_on_run",
 ]
 
 
@@ -336,6 +337,7 @@ class BaseRuntime(ModelObj):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
         **launcher_kwargs,
     ) -> RunObject:
         """
@@ -390,6 +392,9 @@ class BaseRuntime(ModelObj):
                                  standards and is at least 1 minute (-1 for infinite).
                                  If the phase is active for longer than the threshold, the run will be aborted.
                                  See mlconf.function.spec.state_thresholds for the state options and default values.
+        :param reset_on_run:     When True, function python modules will reload prior to code execution.
+                                 This ensures the latest code changes are executed. This argument must be used in
+                                 conjunction with the local=True argument.
         :return: Run context object (RunObject) with run metadata, results and status
         """
         launcher = mlrun.launcher.factory.LauncherFactory().create_launcher(
@@ -418,6 +423,7 @@ class BaseRuntime(ModelObj):
             notifications=notifications,
             returns=returns,
             state_thresholds=state_thresholds,
+            reset_on_run=reset_on_run,
         )
 
     def _get_db_run(self, task: RunObject = None):
mlrun/runtimes/daskjob.py
CHANGED
@@ -494,6 +494,7 @@ class DaskCluster(KubejobRuntime):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
         **launcher_kwargs,
     ) -> RunObject:
         if state_thresholds:
mlrun/runtimes/databricks_job/databricks_runtime.py
CHANGED
@@ -232,6 +232,7 @@ def run_mlrun_databricks_job(context,task_parameters: dict, **kwargs):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
         **launcher_kwargs,
     ) -> RunObject:
         if local:
mlrun/runtimes/local.py
CHANGED
@@ -391,7 +391,13 @@ def load_module(file_name, handler, context):
     if context:
         class_args = copy(context._parameters.get("_init_args", {}))
 
-    return get_handler_extended(handler, context, class_args, namespaces=module)
+    return get_handler_extended(
+        handler,
+        context,
+        class_args,
+        namespaces=module,
+        reload_modules=context._reset_on_run,
+    )
 
 
 def run_exec(cmd, args, env=None, cwd=None):
mlrun/serving/__init__.py
CHANGED
@@ -22,10 +22,17 @@ __all__ = [
     "RouterStep",
     "QueueStep",
     "ErrorStep",
+    "MonitoringApplicationStep",
 ]
 
 from .routers import ModelRouter, VotingEnsemble  # noqa
 from .server import GraphContext, GraphServer, create_graph_server  # noqa
-from .states import ErrorStep, QueueStep, RouterStep, TaskStep  # noqa
+from .states import (
+    ErrorStep,
+    QueueStep,
+    RouterStep,
+    TaskStep,
+    MonitoringApplicationStep,
+)  # noqa
 from .v1_serving import MLModelServer, new_v1_model_server  # noqa
 from .v2_serving import V2ModelServer  # noqa
mlrun/serving/states.py
CHANGED
@@ -12,7 +12,13 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-__all__ = ["TaskStep", "RouterStep", "RootFlowStep", "ErrorStep"]
+__all__ = [
+    "TaskStep",
+    "RouterStep",
+    "RootFlowStep",
+    "ErrorStep",
+    "MonitoringApplicationStep",
+]
 
 import os
 import pathlib
@@ -55,6 +61,7 @@ class StepKinds:
     choice = "choice"
     root = "root"
     error_step = "error_step"
+    monitoring_application = "monitoring_application"
 
 
 _task_step_fields = [
@@ -485,13 +492,15 @@ class TaskStep(BaseStep):
                 class_args[key] = arg
         class_args.update(extra_kwargs)
 
+        if not isinstance(self, MonitoringApplicationStep):
+            # add common args (name, context, ..) only if target class can accept them
+            argspec = getfullargspec(class_object)
+
+            for key in ["name", "context", "input_path", "result_path", "full_event"]:
+                if argspec.varkw or key in argspec.args:
+                    class_args[key] = getattr(self, key)
+            if argspec.varkw or "graph_step" in argspec.args:
+                class_args["graph_step"] = self
         return class_args
 
     def get_step_class_object(self, namespace):
@@ -582,6 +591,39 @@ class TaskStep(BaseStep):
         return event
 
 
+class MonitoringApplicationStep(TaskStep):
+    """monitoring application execution step, runs users class code"""
+
+    kind = "monitoring_application"
+    _default_class = ""
+
+    def __init__(
+        self,
+        class_name: Union[str, type] = None,
+        class_args: dict = None,
+        handler: str = None,
+        name: str = None,
+        after: list = None,
+        full_event: bool = None,
+        function: str = None,
+        responder: bool = None,
+        input_path: str = None,
+        result_path: str = None,
+    ):
+        super().__init__(
+            class_name=class_name,
+            class_args=class_args,
+            handler=handler,
+            name=name,
+            after=after,
+            full_event=full_event,
+            function=function,
+            responder=responder,
+            input_path=input_path,
+            result_path=result_path,
+        )
+
+
 class ErrorStep(TaskStep):
     """error execution step, runs a class or handler"""
 
@@ -1323,6 +1365,7 @@ classes_map = {
     "flow": FlowStep,
     "queue": QueueStep,
    "error_step": ErrorStep,
+    "monitoring_application": MonitoringApplicationStep,
 }
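The change above passes common step attributes (name, context, input_path, and so on) to the user class only when its constructor can accept them, using getfullargspec, and skips that injection entirely for monitoring application steps. A standalone sketch of that filtering is below; the classes and attribute names are illustrative, not MLRun's.

from inspect import getfullargspec

# Sketch: keep only the common args the target class's __init__ can actually accept.
def filter_common_args(class_object, available: dict) -> dict:
    argspec = getfullargspec(class_object)
    return {
        key: value
        for key, value in available.items()
        if argspec.varkw or key in argspec.args
    }

class NarrowApp:
    def __init__(self, threshold):
        self.threshold = threshold

class WideApp:
    def __init__(self, threshold, name=None, context=None, **kwargs):
        self.threshold, self.name = threshold, name

common = {"name": "drift-app", "context": None, "input_path": None}
print(filter_common_args(NarrowApp, common))  # {} since NarrowApp accepts none of them
print(filter_common_args(WideApp, common))    # all keys pass since WideApp takes **kwargs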
mlrun/serving/utils.py
CHANGED
@@ -46,6 +46,15 @@ def _update_result_body(result_path, event_body, result):
 class StepToDict:
     """auto serialization of graph steps to a python dictionary"""
 
+    meta_keys = [
+        "context",
+        "name",
+        "input_path",
+        "result_path",
+        "full_event",
+        "kwargs",
+    ]
+
     def to_dict(self, fields: list = None, exclude: list = None, strip: bool = False):
         """convert the step object to a python dictionary"""
         fields = fields or getattr(self, "_dict_fields", None)
@@ -54,24 +63,16 @@ class StepToDict:
         if exclude:
             fields = [field for field in fields if field not in exclude]
 
-        meta_keys = [
-            "context",
-            "name",
-            "input_path",
-            "result_path",
-            "full_event",
-            "kwargs",
-        ]
         args = {
             key: getattr(self, key)
             for key in fields
-            if getattr(self, key, None) is not None and key not in meta_keys
+            if getattr(self, key, None) is not None and key not in self.meta_keys
         }
         # add storey kwargs or extra kwargs
         if "kwargs" in fields and (hasattr(self, "kwargs") or hasattr(self, "_kwargs")):
             kwargs = getattr(self, "kwargs", {}) or getattr(self, "_kwargs", {})
             for key, value in kwargs.items():
-                if key not in meta_keys:
+                if key not in self.meta_keys:
                     args[key] = value
 
         mod_name = self.__class__.__module__
@@ -80,7 +81,9 @@ class StepToDict:
             class_path = f"{mod_name}.{class_path}"
         struct = {
             "class_name": class_path,
-            "name": self.name
+            "name": self.name
+            if hasattr(self, "name") and self.name
+            else self.__class__.__name__,
             "class_args": args,
         }
         if hasattr(self, "_STEP_KIND"):
@@ -94,6 +97,11 @@ class StepToDict:
         return struct
 
 
+class MonitoringApplicationToDict(StepToDict):
+    _STEP_KIND = "monitoring_application"
+    meta_keys = []
+
+
 class RouterToDict(StepToDict):
     _STEP_KIND = "router"
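Moving meta_keys from a local variable inside to_dict to a class attribute is what lets the new MonitoringApplicationToDict mixin clear the exclusion list. A standalone sketch of that effect is below; the classes are illustrative, not MLRun's.

# Sketch: meta_keys as a class attribute so subclasses can override the exclusions.
class StepToDictSketch:
    meta_keys = ["context", "name", "input_path"]

    def to_dict(self) -> dict:
        return {
            key: value
            for key, value in vars(self).items()
            if value is not None and key not in self.meta_keys
        }

class MonitoringToDictSketch(StepToDictSketch):
    meta_keys = []  # monitoring application steps keep every attribute

class Step(StepToDictSketch):
    def __init__(self):
        self.name = "my-step"
        self.threshold = 0.5

class MonitoringStep(MonitoringToDictSketch):
    def __init__(self):
        self.name = "my-app"
        self.threshold = 0.5

print(Step().to_dict())            # {'threshold': 0.5}
print(MonitoringStep().to_dict())  # {'name': 'my-app', 'threshold': 0.5}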
mlrun/utils/helpers.py
CHANGED
@@ -26,7 +26,7 @@ import sys
 import typing
 import warnings
 from datetime import datetime, timezone
-from importlib import import_module
+from importlib import import_module, reload
 from os import path
 from types import ModuleType
 from typing import Any, Optional
@@ -1019,16 +1019,35 @@ def create_class(pkg_class: str):
     return class_
 
 
-def create_function(pkg_func: str):
+def create_function(pkg_func: str, reload_modules: bool = False):
     """Create a function from a package.module.function string
 
     :param pkg_func:        full function location,
                             e.g. "sklearn.feature_selection.f_classif"
+    :param reload_modules:  reload the function again.
     """
     splits = pkg_func.split(".")
     pkg_module = ".".join(splits[:-1])
     cb_fname = splits[-1]
     pkg_module = __import__(pkg_module, fromlist=[cb_fname])
+
+    if reload_modules:
+        # Even though the function appears in the modules list, we need to reload
+        # the code again because it may have changed
+        try:
+            logger.debug("Reloading module", module=pkg_func)
+            _reload(
+                pkg_module,
+                max_recursion_depth=mlrun.mlconf.function.spec.reload_max_recursion_depth,
+            )
+        except Exception as exc:
+            logger.warning(
+                "Failed to reload module. Not all associated modules can be reloaded, import them manually."
+                "Or, with Jupyter, restart the Python kernel.",
+                module=pkg_func,
+                err=mlrun.errors.err_to_str(exc),
+            )
+
     function_ = getattr(pkg_module, cb_fname)
     return function_
 
@@ -1086,8 +1105,14 @@ def get_class(class_name, namespace=None):
     return class_object
 
 
-def get_function(function, namespace):
-    """
+def get_function(function, namespaces, reload_modules: bool = False):
+    """Return function callable object from function name string
+
+    :param function:       path to the function ([class_name::]function)
+    :param namespaces:     one or list of namespaces/modules to search the function in
+    :param reload_modules: reload the function again
+    :return: function handler (callable)
+    """
     if callable(function):
         return function
 
@@ -1096,12 +1121,12 @@ def get_function(function, namespace):
         if not function.endswith(")"):
             raise ValueError('function expression must start with "(" and end with ")"')
         return eval("lambda event: " + function[1:-1], {}, {})
-    function_object = _search_in_namespaces(function, namespace)
+    function_object = _search_in_namespaces(function, namespaces)
     if function_object is not None:
         return function_object
 
     try:
-        function_object = create_function(function)
+        function_object = create_function(function, reload_modules)
     except (ImportError, ValueError) as exc:
         raise ImportError(
             f"state/function init failed, handler '{function}' not found"
@@ -1110,19 +1135,24 @@ def get_function(function, namespace):
 
 
 def get_handler_extended(
-    handler_path: str, context=None, class_args: dict = None, namespaces=None
+    handler_path: str,
+    context=None,
+    class_args: dict = None,
+    namespaces=None,
+    reload_modules: bool = False,
 ):
-    """
+    """Get function handler from [class_name::]handler string
 
     :param handler_path:   path to the function ([class_name::]handler)
    :param context:        MLRun function/job client context
    :param class_args:     optional dict of class init kwargs
    :param namespaces:     one or list of namespaces/modules to search the handler in
+    :param reload_modules: reload the function again
    :return: function handler (callable)
    """
    class_args = class_args or {}
    if "::" not in handler_path:
-        return get_function(handler_path, namespaces)
+        return get_function(handler_path, namespaces, reload_modules)
 
    splitted = handler_path.split("::")
    class_path = splitted[0].strip()
@@ -1628,3 +1658,15 @@ def format_alert_summary(
    result = result.replace("{{name}}", alert.name)
    result = result.replace("{{entity}}", event_data.entity.ids[0])
    return result
+
+
+def _reload(module, max_recursion_depth):
+    """Recursively reload modules."""
+    if max_recursion_depth <= 0:
+        return
+
+    reload(module)
+    for attribute_name in dir(module):
+        attribute = getattr(module, attribute_name)
+        if type(attribute) is ModuleType:
+            _reload(attribute, max_recursion_depth - 1)
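The new _reload helper reloads a module and then recurses into the modules it references, bounded by the reload_max_recursion_depth value added to config.py above. A self-contained illustration of the same idea follows; it uses only the standard library and reloads the json module as an example target.

from importlib import import_module, reload
from types import ModuleType

# Illustration of a depth-bounded recursive module reload.
def recursive_reload(module: ModuleType, max_recursion_depth: int = 100) -> None:
    if max_recursion_depth <= 0:
        return
    reload(module)
    for attribute_name in dir(module):
        attribute = getattr(module, attribute_name)
        if type(attribute) is ModuleType:
            recursive_reload(attribute, max_recursion_depth - 1)

# Example: reload "json" together with the modules it imports, capped at depth 3.
recursive_reload(import_module("json"), max_recursion_depth=3)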
mlrun/utils/version/version.json
CHANGED

{mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/RECORD
CHANGED
@@ -1,17 +1,17 @@
 mlrun/__init__.py,sha256=y08M1JcKXy5-9_5WaI9fn5aV5BxIQ5QkbduJK0OxWbA,7470
 mlrun/__main__.py,sha256=F65N1MUdAn5hO4qFuJ1v5M3XSCLHUKv7C010toZd-P4,45852
-mlrun/config.py,sha256=
+mlrun/config.py,sha256=sG_gjNcQ8UPoa9-nTfeT2UMLcZhbezcojs4RMHN4Xvk,65499
 mlrun/errors.py,sha256=53oT_uQliD-CEe7jxJZMFlNOT86zCTYBl802MZYluaE,7395
-mlrun/execution.py,sha256=
+mlrun/execution.py,sha256=ZSk61dXqnEmxnUYxaAjtSzobMlD1yF9VaW56KLmaiUs,41717
 mlrun/features.py,sha256=m17K_3l9Jktwb9dOwlHLTAPTlemsWrRF7dJhXUX0iJU,15429
 mlrun/k8s_utils.py,sha256=WdUajadvAhTR7sAMQdwFqKeJMimuTyqm02VdwK1A4xU,7023
 mlrun/lists.py,sha256=3PqBdcajdwhTe1XuFsAaHTuFVM2kjwepf31qqE82apg,8384
-mlrun/model.py,sha256=
+mlrun/model.py,sha256=bX9OrxF32jAcgphDPJoZkQ9Ov39ECLvdUvoAEcknJOk,72150
 mlrun/render.py,sha256=uVI4kk7XqMAxamholZ_stPQ0nPUebij50ZpgAdjDQ6U,13131
 mlrun/run.py,sha256=HpvrKd77umuOKXH245EaXKFTkzXeqG7D37e1IxoDsB8,42682
 mlrun/secrets.py,sha256=ibtCK79u7JVBZF6F0SP1-xXXF5MyrLEUs_TCWiJAnlc,7798
 mlrun/alerts/__init__.py,sha256=0gtG1BG0DXxFrXegIkjbM1XEN4sP9ODo0ucXrNld1hU,601
-mlrun/alerts/alert.py,sha256=
+mlrun/alerts/alert.py,sha256=dAXff9Y0UY5gwuUiofs6BngiNoebkVWI503oATihAXM,6040
 mlrun/api/schemas/__init__.py,sha256=fEWH4I8hr5AdRJ7yoW44RlFB6NHkYDxyomP5J6ct1z4,14248
 mlrun/artifacts/__init__.py,sha256=daGrLqltI1nE3ES30nm-tanUnxReRzfyxyaxNRx2zbc,1168
 mlrun/artifacts/base.py,sha256=azVkiHaJq9JNFKlb91R1vwkdR2QEqF-rIn7bQIL6rf0,29148
@@ -119,7 +119,7 @@
 mlrun/feature_store/retrieval/job.py,sha256=vm50yAqvaazuTGbCOgN_e1Ax8gh-d-qQN4Eb
 mlrun/feature_store/retrieval/local_merger.py,sha256=jM-8ta44PeNUc1cKMPs-TxrO9t8pXbwu_Tw8MZrLxUY,4513
 mlrun/feature_store/retrieval/spark_merger.py,sha256=I2KKEqSwao1AX1l6QqKRaXExUiry4P4ox-Vpc4AUNCg,11659
 mlrun/feature_store/retrieval/storey_merger.py,sha256=5YM0UPrLjGOobulHkowRO-1LuvFD2cm_0GxcpnTdu0I,6314
-mlrun/frameworks/__init__.py,sha256=
+mlrun/frameworks/__init__.py,sha256=Qp9pZRiUcU-drO_NlZnr1Z3RkUSjK44ET9drfGGfiFM,1017
 mlrun/frameworks/parallel_coordinates.py,sha256=AJ3TuvffAC4_zN-RVcyTkq1T3lomDqgeNf7hVBmscEw,11517
 mlrun/frameworks/_common/__init__.py,sha256=7afutDCDVp999gyWSWQZMJRKGuW3VP3MFil8cobRsyg,962
 mlrun/frameworks/_common/artifacts_library.py,sha256=f0rtDRQI3BYT2ZvXR4drSXZPYPJG19Sbej-_ru-i0II,8497
@@ -207,16 +207,16 @@
 mlrun/launcher/base.py,sha256=ud1qc2v66-84haAVBuQ2e0IsOzvd_bleSVVImwNWhwE,16461
 mlrun/launcher/client.py,sha256=kgju2mvGuVlvJWRk8sL8qTKF0lf_cSPK2nqYz1oZy3E,6196
 mlrun/launcher/factory.py,sha256=RW7mfzEFi8fR0M-4W1JQg1iq3_muUU6OTqT_3l4Ubrk,2338
-mlrun/launcher/local.py,sha256=
-mlrun/launcher/remote.py,sha256=
+mlrun/launcher/local.py,sha256=6t-iumKSd5MurNO0jshDAnG8IZWfpGCiW4JtH8UX9qI,11272
+mlrun/launcher/remote.py,sha256=tGICSfWtvUHeR31mbzy6gqHejmDxjPUgjtxXTWhRubg,7699
 mlrun/model_monitoring/__init__.py,sha256=dm5_j0_pwqrdzFwTaEtGnKfv2nVpNaM56nBI-oqLbNU,879
-mlrun/model_monitoring/api.py,sha256=
+mlrun/model_monitoring/api.py,sha256=KzPEPTiqsq5ELcV_O5jbPISdS5o9sO9jwWwSvDwG0hE,30363
 mlrun/model_monitoring/application.py,sha256=RJ8HeAPfGO3P2A_dEZYNg60c1wKTADh2YSv8BQ5embg,745
 mlrun/model_monitoring/controller.py,sha256=MQ4BF3vfJSyYZv6HuTuSLt_nqaflgBYyOSwCccbwaio,27981
 mlrun/model_monitoring/controller_handler.py,sha256=J9Y9ppLsQaxyYRl21165Rr7QuI9EM-mk-5veAqs4Bi0,1336
 mlrun/model_monitoring/evidently_application.py,sha256=iOc42IVjj8m6PDBmVcKIMWm46Bu0EdO9SDcH40Eqhyo,769
 mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
-mlrun/model_monitoring/helpers.py,sha256=
+mlrun/model_monitoring/helpers.py,sha256=ROGQVZVoX7kgKI17dqIQSH-8r3t0K4oiwMRXPvfUGxE,11479
 mlrun/model_monitoring/model_endpoint.py,sha256=7VX0cBATqLsA4sSinDzouf41ndxqh2mf5bO9BW0G5Z4,4017
 mlrun/model_monitoring/prometheus.py,sha256=cUR4y73GutJB_pA_VCBDl9YtK4PcIJp2wj2rnLVmYi4,7578
 mlrun/model_monitoring/stream_processing.py,sha256=TuFb1W-WYvynxU_kJl9h3xDn5Ir9b1MUwuynak-DK58,42467
@@ -224,7 +224,7 @@
 mlrun/model_monitoring/writer.py,sha256=cAQ24HbtWGA8czzaemmjLT4WfDInJ7gPpkkIp9LePBY,10013
 mlrun/model_monitoring/applications/__init__.py,sha256=i793GqYee01mRh_KD6GShvX7UbPBgdJDO4qf9Z3BXEQ,970
 mlrun/model_monitoring/applications/_application_steps.py,sha256=-g9jxIAFM5f22iJaUAQVlM8QRSv6KFT92I4WHmZe_f0,6028
-mlrun/model_monitoring/applications/base.py,sha256=
+mlrun/model_monitoring/applications/base.py,sha256=buVKyghH4AB3chZ5py1vyMIFnTF-deY8YDf_fPC9BnQ,11307
 mlrun/model_monitoring/applications/context.py,sha256=i-9h6pWyrS8mjw53zd0kb_Dsf9ReS8cSfnth8PvOEI4,8571
 mlrun/model_monitoring/applications/evidently_base.py,sha256=AE_eIz-GEYm3AZTrMCiqF9bcSMlvYk08LJb6bKWAQLg,8057
 mlrun/model_monitoring/applications/histogram_data_drift.py,sha256=HZmNg09SCjAKkIlKmJwqR7hr-8sXrwFEqXgJCitVbXc,13039
@@ -242,23 +242,23 @@
 mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py,sha256=yJJZppbKj3PsOANS_
 mlrun/model_monitoring/db/stores/v3io_kv/__init__.py,sha256=6CsTXAxeLbbf8yfCADTaxmiavqwrLEdYFJ-qc5kgDAY,569
 mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py,sha256=o8P80yowPj4NRLonJ8rdSffot1OB-V6i3Ji1m_TWvzs,27399
 mlrun/model_monitoring/db/tsdb/__init__.py,sha256=NR895JSsEvNmINL223GLf8IbJ16b9Wn4XnxobDwivM8,3724
-mlrun/model_monitoring/db/tsdb/base.py,sha256=
+mlrun/model_monitoring/db/tsdb/base.py,sha256=l5SNjO3btJSeglOCn4NHnckntAaP6hcZBVRhOty3KSQ,13122
 mlrun/model_monitoring/db/tsdb/helpers.py,sha256=0oUXc4aUkYtP2SGP6jTb3uPPKImIUsVsrb9otX9a7O4,1189
 mlrun/model_monitoring/db/tsdb/tdengine/__init__.py,sha256=vgBdsKaXUURKqIf3M0y4sRatmSVA4CQiJs7J5dcVBkQ,620
 mlrun/model_monitoring/db/tsdb/tdengine/schemas.py,sha256=94u886UtyK40YNtdOX8WiJUImDytygdaqIzFwo_ExzI,8881
 mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py,sha256=x1cWM2ystghHUeDZNgnaN4kI_XjFOnh1FRBRJAX-tsw,1620
-mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=
+mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=oplt9s-C-OGa__V456nkHwvyBe5YHxcuIJcYV9GFQHY,15521
 mlrun/model_monitoring/db/tsdb/v3io/__init__.py,sha256=aL3bfmQsUQ-sbvKGdNihFj8gLCK3mSys0qDcXtYOwgc,616
 mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py,sha256=qbiyBzrdWLJAKLmJV4K8jUxsAMbKGZ1vip7WNfRcpXM,4764
-mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=
+mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=L6XXXr4Mkfad3iTwDyfKScdeuEamL-xtTjoKSwefi7w,26194
 mlrun/model_monitoring/metrics/__init__.py,sha256=6CsTXAxeLbbf8yfCADTaxmiavqwrLEdYFJ-qc5kgDAY,569
 mlrun/model_monitoring/metrics/histogram_distance.py,sha256=E9_WIl2vd6qNvoHVHoFcnuQk3ekbFWOdi8aU7sHrfk4,4724
-mlrun/package/__init__.py,sha256=
+mlrun/package/__init__.py,sha256=_h308qe3NijbH2xymZ_G6XIJSGAa3OX24Je76gP855Q,7941
 mlrun/package/context_handler.py,sha256=Z8v7cXAZXa5l3Tgg6IiEVm74Qbp5cOxx30jvkAY3dwo,14589
 mlrun/package/errors.py,sha256=LKF8SSaRIdbkB7JQz6b9U4mZV42Ebnf6ZHu4wKuWqK4,1204
 mlrun/package/packager.py,sha256=xE7U1njB2RXhmiA0kCSmA4i5j84Dd7Bt-H4Fk5OcVLk,15064
 mlrun/package/packagers_manager.py,sha256=g4XuqpKJGrGKYrA38FXZd9gquDv8KUcW1eXA-DesaMA,37161
-mlrun/package/packagers/__init__.py,sha256=
+mlrun/package/packagers/__init__.py,sha256=Mq7-KCjDFYPGcDByAROYT9dXHZx9-QDmeqCepqrwFm0,1039
 mlrun/package/packagers/default_packager.py,sha256=QaZyxm03fRTJy5OGBeyVvSpEqnWj3-hSQVbsCjlTpLM,26625
 mlrun/package/packagers/numpy_packagers.py,sha256=k7Vke41LOp1ExbXCKf4FyahBIDlBqSiYrGPMeH0yI7M,25602
 mlrun/package/packagers/pandas_packagers.py,sha256=KPOZj1yiHxV2b1iah4hlwoNQP4JKzt95Fe9Tn9OUPs8,35761
@@ -273,23 +273,23 @@
 mlrun/package/utils/type_hint_utils.py,sha256=JYrek6vuN3z7e6MGUD3qBLDfQ03C4puZXN
 mlrun/platforms/__init__.py,sha256=ggSGF7inITs6S-vj9u4S9X_5psgbA0G3GVqf7zu8qYc,2406
 mlrun/platforms/iguazio.py,sha256=1h5BpdAEQJBg2vIt7ySjUADU0ip5OkaMYr0_VREi9ys,13084
 mlrun/projects/__init__.py,sha256=Lv5rfxyXJrw6WGOWJKhBz66M6t3_zsNMCfUD6waPwx4,1153
-mlrun/projects/operations.py,sha256=
+mlrun/projects/operations.py,sha256=NEN4PmSvLO9QMwSG4TncmBgTKC9wJp7hGo5lA7OYN_Q,19199
 mlrun/projects/pipelines.py,sha256=c9HhMtXk-6kmTCiY-f0Cmd3GWgL_fBFE6HXp2lrhRtE,40009
-mlrun/projects/project.py,sha256=
+mlrun/projects/project.py,sha256=umxA0OnsZYtVNu2lmXuXCMMA-0s6iNpsa7uv3jT7THM,181355
 mlrun/runtimes/__init__.py,sha256=0-tYDkew-Cr4DM-wztvMbzDA5xq385Jjo-GrtO_84Sc,8741
-mlrun/runtimes/base.py,sha256=
-mlrun/runtimes/daskjob.py,sha256=
+mlrun/runtimes/base.py,sha256=SvaKfWtAKFhrX2RW7wy3B76OUAg9XdTDaJPpbCEhsdY,37323
+mlrun/runtimes/daskjob.py,sha256=_3jQIEroNxG587ZJ0cW5nVJVBb1IcOECor_bkgZHtMk,19194
 mlrun/runtimes/funcdoc.py,sha256=CC9cWRPgBiM2sk4NJTqusjc6O9kZ-49vGA5WRPjREKE,9796
 mlrun/runtimes/function_reference.py,sha256=iWKRe4r2GTc5S8FOIASYUNLwwne8NqIui51PFr8Q4mg,4918
 mlrun/runtimes/generators.py,sha256=v28HdNgxdHvj888G1dTnUeQZz-D9iTO0hoGeZbCdiuQ,7241
 mlrun/runtimes/kubejob.py,sha256=ptBnMTIjukbEznkdixmbGvBqzujXrRzqNfP7ze6M76M,8660
-mlrun/runtimes/local.py,sha256=
+mlrun/runtimes/local.py,sha256=6dc-qDICQOK0iJJZz4KD498YbqyhF-Uk0_iVbaXdC00,22009
 mlrun/runtimes/pod.py,sha256=I9cfZH-u7ZmAHKc8D7htzKILO1K9lzfoHjBOVe29trU,64406
 mlrun/runtimes/remotesparkjob.py,sha256=9DPxDK8x08t9nReMo083TBxJiiqA83mHCbdtxrjj7AU,7426
 mlrun/runtimes/utils.py,sha256=OFATL8d0c5vKN9N2enAu2oS3b4H71RfeG776ZnfZ0J4,14332
 mlrun/runtimes/databricks_job/__init__.py,sha256=kXGBqhLN0rlAx0kTXhozGzFsIdSqW0uTSKMmsLgq_is,569
 mlrun/runtimes/databricks_job/databricks_cancel_task.py,sha256=sIqIg5DQAf4j0wCPA-G0GoxY6vacRddxCy5KDUZszek,2245
-mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=
+mlrun/runtimes/databricks_job/databricks_runtime.py,sha256=p80j2_jHzlH20dHT-avjfcbaDBTY2re1WjlJjbg5uSQ,12794
 mlrun/runtimes/databricks_job/databricks_wrapper.py,sha256=oJzym54jD957yzxRXiSYpituSV8JV_XJh90YTKIwapY,8684
 mlrun/runtimes/mpijob/__init__.py,sha256=V_1gQD1VHa0Qvjqgyv8RLouH27Sy9YTwj2ZG62o32zU,1049
 mlrun/runtimes/mpijob/abstract.py,sha256=kDWo-IY1FKLZhI30j38Xx9HMhlUvHezfd1DT2ShoxZY,9161
@@ -304,14 +304,14 @@
 mlrun/runtimes/nuclio/application/application.py,sha256=f1GwB5IeavDYls1vHeEqkaOT
 mlrun/runtimes/nuclio/application/reverse_proxy.go,sha256=JIIYae6bXzCLf3jXuu49KWPQYoXr_FDQ2Rbo1OWKAd0,3150
 mlrun/runtimes/sparkjob/__init__.py,sha256=_KPvk0qefeLtHO6lxQE_AMOGiMTG_OT48eRCE4Z2ldw,709
 mlrun/runtimes/sparkjob/spark3job.py,sha256=1bNRy72Migrh_ZASQOx7UlSZTbB-xpNc76sz4kfc9UM,41191
-mlrun/serving/__init__.py,sha256=
+mlrun/serving/__init__.py,sha256=-SMRV3q_5cGVPDxRslXPU0zGYZIygs0cSj7WKlOJJUc,1163
 mlrun/serving/merger.py,sha256=PXLn3A21FiLteJHaDSLm5xKNT-80eTTjfHUJnBX1gKY,6116
 mlrun/serving/remote.py,sha256=MrFByphQWmIsKXqw-MOwl2Q1hbtWReYVRKvlcKj9pfw,17980
 mlrun/serving/routers.py,sha256=scvpXD0VmgGRLJb2UqNq0o39ML2_F_SyZ4OXVQhJIOM,55086
 mlrun/serving/server.py,sha256=U27KHG85Q-Eap3bX4sZlutH_YkpTr1oO89MlkHF9ACs,21081
 mlrun/serving/serving_wrapper.py,sha256=R670-S6PX_d5ER6jiHtRvacuPyFzQH0mEf2K0sBIIOM,836
-mlrun/serving/states.py,sha256=
-mlrun/serving/utils.py,sha256=
+mlrun/serving/states.py,sha256=n3RPtzwqfQB1o4H80AoVsP5exL3L3i39ONs-CorWGyM,58539
+mlrun/serving/utils.py,sha256=lej7XcUPX1MmHkEOi_0KZRGSpfbmpnE0GK_Sn4zLkHY,4025
 mlrun/serving/v1_serving.py,sha256=by4myxlnwyZ0ijQ5fURilGCK1sUpdQL2Il1VR3Xqpxg,11805
 mlrun/serving/v2_serving.py,sha256=l2J-kWRPf5vH6T-i96WJwmmzRF2QrWAjepmLVnq04rg,23863
 mlrun/track/__init__.py,sha256=LWRUHJt8JyFW17FyNPOVyWd-NXTf1iptzsK9KFj5fuY,765
@@ -325,7 +325,7 @@
 mlrun/utils/azure_vault.py,sha256=IEFizrDGDbAaoWwDr1WoA88S_EZ0T--vjYtY-i0cvYQ,34
 mlrun/utils/clones.py,sha256=mJpx4nyFiY6jlBCvFABsNuyi_mr1mvfPWn81vlafpOU,7361
 mlrun/utils/condition_evaluator.py,sha256=-nGfRmZzivn01rHTroiGY4rqEv8T1irMyhzxEei-sKc,1897
 mlrun/utils/db.py,sha256=KEa-vzicUhzIwo1wBXax2ZuXtYgf5to7wnsY3CYCiOQ,1713
-mlrun/utils/helpers.py,sha256=
+mlrun/utils/helpers.py,sha256=bJdHRqIaLI6th5FK6IA1gKvypoazeYgV0IjtLyKFgRY,55436
 mlrun/utils/http.py,sha256=l_JCPrCq8bfYUcUcAFWUPvb9Xu-93bLGIhV-H-XCU9s,8707
 mlrun/utils/logger.py,sha256=CG5pgkMeU3VAkIP0pSGOwvFtm0tJYzmPVF8jEp2EtlU,9073
 mlrun/utils/regex.py,sha256=b0AUa2THS-ELzJj0grl5b8Stq609F2XomTZkD9SB1fQ,4900
@@ -343,11 +343,11 @@
 mlrun/utils/notifications/notification/ipython.py,sha256=ZtVL30B_Ha0VGoo4LxO-voT
 mlrun/utils/notifications/notification/slack.py,sha256=Vc6EHdnVAZe-p4ZWMvLc23YjMIDE3h2flf2b83ATVCA,7286
 mlrun/utils/notifications/notification/webhook.py,sha256=WgfxX1cpm8n2A-O08pwnsP4tzbxxv_vNUSnyXG4uKts,2752
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=
+mlrun/utils/version/version.json,sha256=YTYzIsYgfu2QIFU3klixQlisgpqRt5PED9zmOJONRXA,89
 mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
-mlrun-1.7.0rc21.dist-info/LICENSE,sha256=
-mlrun-1.7.0rc21.dist-info/METADATA,sha256=
-mlrun-1.7.0rc21.dist-info/WHEEL,sha256=
-mlrun-1.7.0rc21.dist-info/entry_points.txt,sha256=
-mlrun-1.7.0rc21.dist-info/top_level.txt,sha256=
-mlrun-1.7.0rc21.dist-info/RECORD,,
+mlrun-1.7.0rc22.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.7.0rc22.dist-info/METADATA,sha256=1YFwgv7LW2rwwHez3LXjihC5KrpjncBo3J5DUb6iDx8,19237
+mlrun-1.7.0rc22.dist-info/WHEEL,sha256=GJ7t_kWBFywbagK5eo9IoUwLW6oyOeTKmQ-9iHFVNxQ,92
+mlrun-1.7.0rc22.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.7.0rc22.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.7.0rc22.dist-info/RECORD,,
{mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/LICENSE: file without changes
{mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/WHEEL: file without changes
{mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/entry_points.txt: file without changes
{mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc22.dist-info}/top_level.txt: file without changes
|