mlrun 1.7.0rc9__py3-none-any.whl → 1.7.0rc12__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/__init__.py +1 -0
- mlrun/artifacts/manager.py +17 -6
- mlrun/artifacts/model.py +29 -25
- mlrun/common/schemas/__init__.py +1 -0
- mlrun/common/schemas/alert.py +122 -0
- mlrun/common/schemas/auth.py +4 -0
- mlrun/common/schemas/client_spec.py +1 -0
- mlrun/common/schemas/model_monitoring/constants.py +3 -1
- mlrun/config.py +6 -3
- mlrun/datastore/__init__.py +4 -3
- mlrun/datastore/base.py +6 -5
- mlrun/datastore/sources.py +9 -4
- mlrun/datastore/targets.py +11 -3
- mlrun/datastore/utils.py +16 -0
- mlrun/datastore/v3io.py +27 -50
- mlrun/db/base.py +44 -2
- mlrun/db/httpdb.py +192 -20
- mlrun/db/nopdb.py +36 -1
- mlrun/execution.py +21 -14
- mlrun/feature_store/api.py +6 -3
- mlrun/feature_store/feature_set.py +39 -23
- mlrun/feature_store/feature_vector.py +2 -1
- mlrun/feature_store/steps.py +30 -19
- mlrun/features.py +4 -13
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +2 -2
- mlrun/frameworks/lgbm/__init__.py +1 -1
- mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
- mlrun/frameworks/lgbm/model_handler.py +1 -1
- mlrun/frameworks/pytorch/__init__.py +2 -2
- mlrun/frameworks/sklearn/__init__.py +1 -1
- mlrun/frameworks/tf_keras/__init__.py +1 -1
- mlrun/frameworks/xgboost/__init__.py +1 -1
- mlrun/model.py +2 -2
- mlrun/model_monitoring/application.py +11 -2
- mlrun/model_monitoring/applications/histogram_data_drift.py +3 -3
- mlrun/model_monitoring/controller.py +2 -3
- mlrun/model_monitoring/stream_processing.py +0 -1
- mlrun/model_monitoring/writer.py +32 -0
- mlrun/package/packagers_manager.py +1 -0
- mlrun/platforms/__init__.py +1 -1
- mlrun/platforms/other.py +1 -1
- mlrun/projects/operations.py +11 -4
- mlrun/projects/project.py +168 -62
- mlrun/run.py +72 -40
- mlrun/runtimes/mpijob/abstract.py +8 -8
- mlrun/runtimes/nuclio/function.py +9 -5
- mlrun/runtimes/nuclio/serving.py +12 -14
- mlrun/runtimes/pod.py +3 -3
- mlrun/secrets.py +6 -2
- mlrun/serving/routers.py +3 -1
- mlrun/serving/states.py +9 -35
- mlrun/serving/v2_serving.py +4 -4
- mlrun/utils/helpers.py +1 -1
- mlrun/utils/notifications/notification/base.py +12 -0
- mlrun/utils/notifications/notification/console.py +2 -0
- mlrun/utils/notifications/notification/git.py +3 -1
- mlrun/utils/notifications/notification/ipython.py +2 -0
- mlrun/utils/notifications/notification/slack.py +41 -13
- mlrun/utils/notifications/notification/webhook.py +11 -1
- mlrun/utils/retryer.py +2 -2
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/METADATA +1 -1
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/RECORD +67 -67
- mlrun/datastore/helpers.py +0 -18
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/top_level.txt +0 -0
mlrun/db/base.py
CHANGED

@@ -654,6 +654,37 @@ class RunDBInterface(ABC):
     ):
         pass

+    @abstractmethod
+    def generate_event(
+        self, name: str, event_data: Union[dict, mlrun.common.schemas.Event], project=""
+    ):
+        pass
+
+    @abstractmethod
+    def store_alert_config(
+        self,
+        alert_name: str,
+        alert_data: Union[dict, mlrun.common.schemas.AlertConfig],
+        project="",
+    ):
+        pass
+
+    @abstractmethod
+    def get_alert_config(self, alert_name: str, project=""):
+        pass
+
+    @abstractmethod
+    def list_alerts_configs(self, project=""):
+        pass
+
+    @abstractmethod
+    def delete_alert_config(self, alert_name: str, project=""):
+        pass
+
+    @abstractmethod
+    def reset_alert_config(self, alert_name: str, project=""):
+        pass
+
     @abstractmethod
     def get_builder_status(
         self,
@@ -697,6 +728,17 @@ class RunDBInterface(ABC):
     def get_log_size(self, uid, project=""):
         pass

+    @abstractmethod
+    def store_alert_notifications(
+        self,
+        session,
+        notification_objects: list[mlrun.model.Notification],
+        alert_id: str,
+        project: str,
+        mask_params: bool = True,
+    ):
+        pass
+
     @abstractmethod
     def watch_log(self, uid, project="", watch=True, offset=0):
         pass
@@ -771,10 +813,10 @@ class RunDBInterface(ABC):
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
     ) -> None:
-
+        pass

     @abstractmethod
     def deploy_histogram_data_drift_app(
         self, project: str, image: str = "mlrun/mlrun"
     ) -> None:
-
+        pass
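The new abstract methods extend the RunDBInterface contract, so any custom backend now has to provide them, even if only as stubs (as nopdb.py below does). A minimal sketch of a hypothetical subclass; the class name, storage dict, and return values are illustrative, only the method signatures come from the diff:

from typing import Union

import mlrun.common.schemas
import mlrun.model
from mlrun.db.base import RunDBInterface


class InMemoryRunDB(RunDBInterface):  # hypothetical backend; other abstract methods omitted
    def __init__(self):
        self._alerts = {}

    def store_alert_config(
        self,
        alert_name: str,
        alert_data: Union[dict, mlrun.common.schemas.AlertConfig],
        project="",
    ):
        # keep the latest alert definition per (project, name)
        self._alerts[(project, alert_name)] = alert_data
        return alert_data

    def get_alert_config(self, alert_name: str, project=""):
        return self._alerts.get((project, alert_name))

    def list_alerts_configs(self, project=""):
        return [v for (p, _), v in self._alerts.items() if p == project]

    def delete_alert_config(self, alert_name: str, project=""):
        self._alerts.pop((project, alert_name), None)

    def reset_alert_config(self, alert_name: str, project=""):
        pass  # nothing to reset in this toy backend

    def generate_event(
        self, name: str, event_data: Union[dict, mlrun.common.schemas.Event], project=""
    ):
        pass  # a real backend would evaluate alert triggers here

    def store_alert_notifications(
        self,
        session,
        notification_objects: list[mlrun.model.Notification],
        alert_id: str,
        project: str,
        mask_params: bool = True,
    ):
        pass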
mlrun/db/httpdb.py
CHANGED

@@ -20,6 +20,7 @@ import time
 import traceback
 import typing
 import warnings
+from copy import deepcopy
 from datetime import datetime, timedelta
 from os import path, remove
 from typing import Optional, Union
@@ -282,6 +283,68 @@ class HTTPRunDB(RunDBInterface):

         return response

+    def paginated_api_call(
+        self,
+        method,
+        path,
+        error=None,
+        params=None,
+        body=None,
+        json=None,
+        headers=None,
+        timeout=45,
+        version=None,
+    ) -> typing.Generator[requests.Response, None, None]:
+        """
+        Calls the api with pagination, yielding each page of the response
+        """
+
+        def _api_call(_params):
+            return self.api_call(
+                method=method,
+                path=path,
+                error=error,
+                params=_params,
+                body=body,
+                json=json,
+                headers=headers,
+                timeout=timeout,
+                version=version,
+            )
+
+        first_page_params = deepcopy(params) or {}
+        first_page_params["page"] = 1
+        first_page_params["page-size"] = config.httpdb.pagination.default_page_size
+        response = _api_call(first_page_params)
+        page_token = response.json().get("pagination", {}).get("page-token")
+        if not page_token:
+            yield response
+            return
+
+        params_with_page_token = deepcopy(params) or {}
+        params_with_page_token["page-token"] = page_token
+        while page_token:
+            yield response
+            try:
+                response = _api_call(params_with_page_token)
+            except mlrun.errors.MLRunNotFoundError:
+                # pagination token expired
+                break
+
+            page_token = response.json().get("pagination", {}).get("page-token", None)
+
+    @staticmethod
+    def process_paginated_responses(
+        responses: typing.Generator[requests.Response, None, None], key: str = "data"
+    ) -> list[typing.Any]:
+        """
+        Processes the paginated responses and returns the combined data
+        """
+        data = []
+        for response in responses:
+            data.extend(response.json().get(key, []))
+        return data
+
     def _init_session(self, retry_on_post: bool = False):
         return mlrun.utils.HTTPSessionWithRetry(
             retry_on_exception=config.httpdb.retry_api_call_on_exception
@@ -314,7 +377,7 @@ class HTTPRunDB(RunDBInterface):

         For example::

-            config.dbpath = config.dbpath or
+            config.dbpath = config.dbpath or "http://mlrun-api:8080"
             db = get_run_db().connect()
         """
         # hack to allow unit tests to instantiate HTTPRunDB without a real server behind
@@ -466,6 +529,7 @@ class HTTPRunDB(RunDBInterface):
                 server_cfg.get("feature_store_default_targets")
                 or config.feature_store.default_targets
             )
+            config.alerts.mode = server_cfg.get("alerts_mode") or config.alerts.mode

         except Exception as exc:
             logger.warning(
@@ -698,9 +762,11 @@ class HTTPRunDB(RunDBInterface):

         Example::

-            runs = db.list_runs(
+            runs = db.list_runs(
+                name="download", project="iris", labels=["owner=admin", "kind=job"]
+            )
             # If running in Jupyter, can use the .show() function to display the results
-            db.list_runs(name=
+            db.list_runs(name="", project=project_name).show()


         :param name: Name of the run to retrieve.
@@ -791,15 +857,15 @@
         )
         error = "list runs"
         _path = self._path_of("runs", project)
-
-        return RunList(
+        responses = self.paginated_api_call("GET", _path, error, params=params)
+        return RunList(self.process_paginated_responses(responses, "runs"))

     def del_runs(self, name=None, project=None, labels=None, state=None, days_ago=0):
         """Delete a group of runs identified by the parameters of the function.

         Example::

-            db.del_runs(state=
+            db.del_runs(state="completed")

         :param name: Name of the task which the runs belong to.
         :param project: Project to which the runs belong.
@@ -944,11 +1010,13 @@
         Examples::

             # Show latest version of all artifacts in project
-            latest_artifacts = db.list_artifacts(
+            latest_artifacts = db.list_artifacts("", tag="latest", project="iris")
             # check different artifact versions for a specific artifact
-            result_versions = db.list_artifacts(
+            result_versions = db.list_artifacts("results", tag="*", project="iris")
             # Show artifacts with label filters - both uploaded and of binary type
-            result_labels = db.list_artifacts(
+            result_labels = db.list_artifacts(
+                "results", tag="*", project="iris", labels=["uploaded", "type=binary"]
+            )

         :param name: Name of artifacts to retrieve. Name with '~' prefix is used as a like query, and is not
             case-sensitive. This means that querying for ``~name`` may return artifacts named
@@ -1093,8 +1161,8 @@
         }
         error = "list functions"
         path = f"projects/{project}/functions"
-
-        return
+        responses = self.paginated_api_call("GET", path, error, params=params)
+        return self.process_paginated_responses(responses, "funcs")

     def list_runtime_resources(
         self,
@@ -1240,7 +1308,9 @@
                 name="run_func_on_tuesdays",
                 kind="job",
                 scheduled_object=get_data_func,
-                cron_trigger=schemas.ScheduleCronTrigger(
+                cron_trigger=schemas.ScheduleCronTrigger(
+                    day_of_week="tue", hour=15, minute=30
+                ),
             )
             db.create_schedule(project_name, schedule)
         """
@@ -2133,7 +2203,7 @@
             not a full object.
         Example::

-            feature_set_update = {"status": {"processed"
+            feature_set_update = {"status": {"processed": True}}

         Will apply the field ``status.processed`` to the existing object.
         :param project: Project which contains the modified object.
@@ -2707,11 +2777,11 @@
         :param secrets: A set of secret values to store.
             Example::

-                secrets = {
+                secrets = {"password": "myPassw0rd", "aws_key": "111222333"}
                 db.create_project_secrets(
                     "project1",
                     provider=mlrun.common.schemas.SecretProviderName.kubernetes,
-                    secrets=secrets
+                    secrets=secrets,
                 )
         """
         path = f"projects/{project}/secrets"
@@ -3239,8 +3309,10 @@
                 metadata=mlrun.common.schemas.HubObjectMetadata(
                     name="priv", description="a private source"
                 ),
-                spec=mlrun.common.schemas.HubSourceSpec(
-
+                spec=mlrun.common.schemas.HubSourceSpec(
+                    path="/local/path/to/source", channel="development"
+                ),
+            ),
             )
             db.create_hub_source(private_source)

@@ -3254,9 +3326,9 @@
                 spec=mlrun.common.schemas.HubSourceSpec(
                     path="/local/path/to/source/2",
                     channel="development",
-                    credentials={...}
-                )
-            )
+                    credentials={...},
+                ),
+            ),
             )
             db.create_hub_source(another_source)

@@ -3596,6 +3668,16 @@
         """
         pass

+    def store_alert_notifications(
+        self,
+        session,
+        notification_objects: list[mlrun.model.Notification],
+        alert_id: str,
+        project: str,
+        mask_params: bool = True,
+    ):
+        pass
+
     def submit_workflow(
         self,
         project: str,
@@ -3804,6 +3886,96 @@
             default_docker_registry=config.httpdb.builder.docker_registry,
         )

+    def generate_event(
+        self, name: str, event_data: Union[dict, mlrun.common.schemas.Event], project=""
+    ):
+        """
+        Generate an event.
+        :param name: The name of the event.
+        :param event_data: The data of the event.
+        :param project: The project that the event belongs to.
+        """
+        project = project or config.default_project
+        endpoint_path = f"projects/{project}/events/{name}"
+        error_message = f"post event {project}/events/{name}"
+        if isinstance(event_data, mlrun.common.schemas.Event):
+            event_data = event_data.dict()
+        self.api_call(
+            "POST", endpoint_path, error_message, body=dict_to_json(event_data)
+        )
+
+    def store_alert_config(
+        self,
+        alert_name: str,
+        alert_data: Union[dict, mlrun.common.schemas.AlertConfig],
+        project="",
+    ):
+        """
+        Create/modify an alert.
+        :param alert_name: The name of the alert.
+        :param alert_data: The data of the alert.
+        :param project: the project that the alert belongs to.
+        :return: The created/modified alert.
+        """
+        project = project or config.default_project
+        endpoint_path = f"projects/{project}/alerts/{alert_name}"
+        error_message = f"put alert {project}/alerts/{alert_name}"
+        if isinstance(alert_data, mlrun.common.schemas.AlertConfig):
+            alert_data = alert_data.dict()
+        body = _as_json(alert_data)
+        response = self.api_call("PUT", endpoint_path, error_message, body=body)
+        return mlrun.common.schemas.AlertConfig(**response.json())
+
+    def get_alert_config(self, alert_name: str, project=""):
+        """
+        Retrieve an alert.
+        :param alert_name: The name of the alert to retrieve.
+        :param project: The project that the alert belongs to.
+        :return: The alert object.
+        """
+        project = project or config.default_project
+        endpoint_path = f"projects/{project}/alerts/{alert_name}"
+        error_message = f"get alert {project}/alerts/{alert_name}"
+        response = self.api_call("GET", endpoint_path, error_message)
+        return mlrun.common.schemas.AlertConfig(**response.json())
+
+    def list_alerts_configs(self, project=""):
+        """
+        Retrieve list of alerts of a project.
+        :param project: The project name.
+        :return: All the alerts objects of the project.
+        """
+        project = project or config.default_project
+        endpoint_path = f"projects/{project}/alerts"
+        error_message = f"get alerts {project}/alerts"
+        response = self.api_call("GET", endpoint_path, error_message).json()
+        results = []
+        for item in response:
+            results.append(mlrun.common.schemas.AlertConfig(**item))
+        return results
+
+    def delete_alert_config(self, alert_name: str, project=""):
+        """
+        Delete an alert.
+        :param alert_name: The name of the alert to delete.
+        :param project: The project that the alert belongs to.
+        """
+        project = project or config.default_project
+        endpoint_path = f"projects/{project}/alerts/{alert_name}"
+        error_message = f"delete alert {project}/alerts/{alert_name}"
+        self.api_call("DELETE", endpoint_path, error_message)
+
+    def reset_alert_config(self, alert_name: str, project=""):
+        """
+        Reset an alert.
+        :param alert_name: The name of the alert to reset.
+        :param project: The project that the alert belongs to.
+        """
+        project = project or config.default_project
+        endpoint_path = f"projects/{project}/alerts/{alert_name}/reset"
+        error_message = f"post alert {project}/alerts/{alert_name}/reset"
+        self.api_call("POST", endpoint_path, error_message)
+

 def _as_json(obj):
     fn = getattr(obj, "to_json", None)
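Taken together, the httpdb.py additions give the SDK client a small CRUD surface for alert configurations plus an event-generation call, and they route list_runs/list_functions through the new pagination helpers. A rough client-side sketch, assuming a reachable MLRun API and a project named "my-project"; the payload keys below are placeholders, since the actual alert/event schemas live in the new mlrun/common/schemas/alert.py listed above but not expanded here:

import mlrun

# HTTPRunDB is returned when MLRUN_DBPATH / mlrun.mlconf.dbpath points at the API server
db = mlrun.get_run_db()
project = "my-project"

# create or update an alert config; alert_data may be a dict or an
# mlrun.common.schemas.AlertConfig object (keys here are illustrative only)
alert = db.store_alert_config(
    "drift-alert",
    alert_data={"project": project, "name": "drift-alert"},  # plus the remaining AlertConfig fields
    project=project,
)

# read, list, reset, and delete
alert = db.get_alert_config("drift-alert", project=project)
alerts = db.list_alerts_configs(project=project)
db.reset_alert_config("drift-alert", project=project)
db.delete_alert_config("drift-alert", project=project)

# post an event; event_data may be a dict or an mlrun.common.schemas.Event object
db.generate_event("my-event", event_data={"kind": "..."}, project=project)

# list_runs now pages through results internally via paginated_api_call /
# process_paginated_responses, so the call site is unchanged
runs = db.list_runs(project=project)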
mlrun/db/nopdb.py
CHANGED

@@ -581,6 +581,16 @@ class NopDB(RunDBInterface):
     ):
         pass

+    def store_alert_notifications(
+        self,
+        session,
+        notification_objects: list[mlrun.model.Notification],
+        alert_id: str,
+        project: str,
+        mask_params: bool = True,
+    ):
+        pass
+
     def get_log_size(self, uid, project=""):
         pass

@@ -646,9 +656,34 @@
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
     ) -> None:
-
+        pass

     def deploy_histogram_data_drift_app(
         self, project: str, image: str = "mlrun/mlrun"
     ) -> None:
         raise NotImplementedError
+
+    def generate_event(
+        self, name: str, event_data: Union[dict, mlrun.common.schemas.Event], project=""
+    ):
+        pass
+
+    def store_alert_config(
+        self,
+        alert_name: str,
+        alert_data: Union[dict, mlrun.common.schemas.AlertConfig],
+        project="",
+    ):
+        pass
+
+    def get_alert_config(self, alert_name: str, project=""):
+        pass
+
+    def list_alerts_configs(self, project=""):
+        pass
+
+    def delete_alert_config(self, alert_name: str, project=""):
+        pass
+
+    def reset_alert_config(self, alert_name: str, project=""):
+        pass
mlrun/execution.py
CHANGED

@@ -224,12 +224,12 @@ class MLClientCtx:
                 with context.get_child_context(myparam=param) as child:
                     accuracy = child_handler(child, df, **child.parameters)
                     accuracy_sum += accuracy
-                    child.log_result(
+                    child.log_result("accuracy", accuracy)
                     if accuracy > best_accuracy:
                         child.mark_as_best()
                         best_accuracy = accuracy

-            context.log_result(
+            context.log_result("avg_accuracy", accuracy_sum / len(param_list))

         :param params: Extra (or override) params to parent context
         :param with_parent_params: Child will copy the parent parameters and add to them
@@ -289,7 +289,9 @@

         Example::

-            feature_vector = context.get_store_resource(
+            feature_vector = context.get_store_resource(
+                "store://feature-vectors/default/myvec"
+            )
             dataset = context.get_store_resource("store://artifacts/default/mydata")

         :param url: Store resource uri/path, store://<type>/<project>/<name>:<version>
@@ -421,7 +423,7 @@

         Example::

-            data_path=context.artifact_subpath(
+            data_path = context.artifact_subpath("data")

         """
         return os.path.join(self.artifact_path, *subpaths)
@@ -525,7 +527,7 @@

         Example::

-            context.log_result(
+            context.log_result("accuracy", 0.85)

         :param key: Result key
         :param value: Result value
@@ -539,7 +541,7 @@

         Example::

-            context.log_results({
+            context.log_results({"accuracy": 0.85, "loss": 0.2})

         :param results: Key/value dict or results
         :param commit: Commit (write to DB now vs wait for the end of the run)
@@ -674,7 +676,9 @@
                 "age": [42, 52, 36, 24, 73],
                 "testScore": [25, 94, 57, 62, 70],
             }
-            df = pd.DataFrame(
+            df = pd.DataFrame(
+                raw_data, columns=["first_name", "last_name", "age", "testScore"]
+            )
             context.log_dataset("mydf", df=df, stats=True)

         :param key: Artifact key
@@ -752,13 +756,16 @@

         Example::

-            context.log_model(
-
-
-
-
-
-
+            context.log_model(
+                "model",
+                body=dumps(model),
+                model_file="model.pkl",
+                metrics=context.results,
+                training_set=training_df,
+                label_column="label",
+                feature_vector=feature_vector_uri,
+                labels={"app": "fraud"},
+            )

         :param key: Artifact key or artifact class ()
         :param body: Will use the body as the artifact content
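The execution.py hunks only restore docstring examples that were previously truncated, but read together they describe the usual logging flow inside a handler. A small sketch that exercises those calls; the handler name, data, and values are illustrative:

from pickle import dumps

import pandas as pd


def trainer(context, factor: float = 1.0):
    # scalar results
    context.log_result("accuracy", 0.85 * factor)
    context.log_results({"accuracy": 0.85 * factor, "loss": 0.2})

    # dataset artifact
    raw_data = {"first_name": ["Jason", "Molly"], "age": [42, 52]}
    df = pd.DataFrame(raw_data, columns=["first_name", "age"])
    context.log_dataset("mydf", df=df, stats=True)

    # model artifact, mirroring the log_model docstring above
    model = {"weights": [1, 2, 3]}  # placeholder for a real fitted model
    context.log_model(
        "model",
        body=dumps(model),
        model_file="model.pkl",
        metrics=context.results,
        labels={"app": "fraud"},
    )

Running it locally with something like mlrun.new_function().run(handler=trainer, local=True) should register the results and artifacts on the returned run object.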
mlrun/feature_store/api.py
CHANGED

@@ -136,7 +136,10 @@ def get_offline_features(
         ]
         vector = FeatureVector(features=features)
         resp = get_offline_features(
-            vector,
+            vector,
+            entity_rows=trades,
+            entity_timestamp_column="time",
+            query="ticker in ['GOOG'] and bid>100",
         )
         print(resp.to_dataframe())
         print(vector.get_stats_table())
@@ -307,7 +310,7 @@ def get_online_feature_service(

     Example::

-        svc = get_online_feature_service(vector_uri, entity_keys=[
+        svc = get_online_feature_service(vector_uri, entity_keys=["ticker"])
         try:
             resp = svc.get([{"ticker": "GOOG"}, {"ticker": "MSFT"}])
             print(resp)
@@ -456,7 +459,7 @@ def ingest(
         df = ingest(stocks_set, stocks, infer_options=fstore.InferOptions.default())

         # for running as remote job
-        config = RunConfig(image=
+        config = RunConfig(image="mlrun/mlrun")
         df = ingest(stocks_set, stocks, run_config=config)

         # specify source and targets
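The api.py hunks likewise just complete truncated docstring examples for get_offline_features, get_online_feature_service, and ingest. For orientation, a combined sketch of how those calls relate; feature set and column names are illustrative and assume the feature sets were already ingested:

import mlrun.feature_store as fstore

# define a vector over previously ingested feature sets
features = ["stock-quotes.bid", "stock-quotes.asks_sum_5h as asks_5h", "stocks.*"]
vector = fstore.FeatureVector("stocks-vec", features)

# offline (batch) retrieval, optionally filtered by a query expression
resp = fstore.get_offline_features(vector, query="ticker in ['GOOG'] and bid>100")
print(resp.to_dataframe())

# online (real-time) retrieval through a feature service
svc = fstore.get_online_feature_service(vector, entity_keys=["ticker"])
try:
    print(svc.get([{"ticker": "GOOG"}, {"ticker": "MSFT"}]))
finally:
    svc.close()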
mlrun/feature_store/feature_set.py
CHANGED

@@ -337,7 +337,10 @@ class FeatureSet(ModelObj):
     example::

         import mlrun.feature_store as fstore
-
+
+        ticks = fstore.FeatureSet(
+            "ticks", entities=["stock"], timestamp_key="timestamp"
+        )
         ticks.ingest(df)

     :param name: name of the feature set
@@ -625,12 +628,12 @@ class FeatureSet(ModelObj):

             import mlrun.feature_store as fstore

-            ticks = fstore.FeatureSet(
-
-
-            ticks.add_entity(
-
-
+            ticks = fstore.FeatureSet(
+                "ticks", entities=["stock"], timestamp_key="timestamp"
+            )
+            ticks.add_entity(
+                "country", mlrun.data_types.ValueType.STRING, description="stock country"
+            )
             ticks.add_entity("year", mlrun.data_types.ValueType.INT16)
             ticks.save()

@@ -650,13 +653,23 @@ class FeatureSet(ModelObj):
             import mlrun.feature_store as fstore
             from mlrun.features import Feature

-            ticks = fstore.FeatureSet(
-
-
-            ticks.add_feature(
-
-
-
+            ticks = fstore.FeatureSet(
+                "ticks", entities=["stock"], timestamp_key="timestamp"
+            )
+            ticks.add_feature(
+                Feature(
+                    value_type=mlrun.data_types.ValueType.STRING,
+                    description="client consistency",
+                ),
+                "ABC01",
+            )
+            ticks.add_feature(
+                Feature(
+                    value_type=mlrun.data_types.ValueType.FLOAT,
+                    description="client volatility",
+                ),
+                "SAB",
+            )
             ticks.save()

         :param feature: setting of Feature
@@ -860,15 +873,18 @@ class FeatureSet(ModelObj):
     example::

         import mlrun.feature_store as fstore
+
         ...
-        ticks = fstore.FeatureSet(
-
-
-        ticks.add_aggregation(
-
-
-
-
+        ticks = fstore.FeatureSet(
+            "ticks", entities=["stock"], timestamp_key="timestamp"
+        )
+        ticks.add_aggregation(
+            name="priceN",
+            column="price",
+            operations=["avg"],
+            windows=["1d"],
+            period="1h",
+        )
         ticks.plot(rankdir="LR", with_targets=True)

     :param filename: target filepath for the graph image (None for the notebook)
@@ -1005,7 +1021,7 @@ class FeatureSet(ModelObj):
         df = stocks_set.ingest(stocks, infer_options=fstore.InferOptions.default())

         # for running as remote job
-        config = RunConfig(image=
+        config = RunConfig(image="mlrun/mlrun")
         df = ingest(stocks_set, stocks, run_config=config)

         # specify source and targets
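Stitched together, the restored FeatureSet docstrings outline the usual definition flow: create the set, add entities, features, and aggregations, then ingest. A compact sketch with illustrative column names, assuming a configured MLRun environment for the ingest step:

import pandas as pd

import mlrun.data_types
import mlrun.feature_store as fstore
from mlrun.features import Feature

ticks = fstore.FeatureSet("ticks", entities=["stock"], timestamp_key="timestamp")
ticks.add_entity("country", mlrun.data_types.ValueType.STRING, description="stock country")
ticks.add_feature(
    Feature(value_type=mlrun.data_types.ValueType.FLOAT, description="bid price"), "bid"
)
ticks.add_aggregation(
    name="priceN", column="price", operations=["avg"], windows=["1d"], period="1h"
)

df = pd.DataFrame(
    {
        "stock": ["GOOG", "MSFT"],
        "country": ["US", "US"],
        "bid": [720.5, 51.95],
        "price": [721.0, 52.0],
        "timestamp": pd.to_datetime(["2024-01-01", "2024-01-01"]),
    }
)
ticks.ingest(df)  # ingest is called on the feature set itself, as in the first hunk above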
mlrun/feature_store/feature_vector.py
CHANGED

@@ -486,6 +486,7 @@ class FeatureVector(ModelObj):
     example::

         import mlrun.feature_store as fstore
+
         features = ["quotes.bid", "quotes.asks_sum_5h as asks_5h", "stocks.*"]
         vector = fstore.FeatureVector("my-vec", features)

@@ -852,7 +853,7 @@ class FeatureVector(ModelObj):

         Example::

-            svc = vector_uri.get_online_feature_service(entity_keys=[
+            svc = vector_uri.get_online_feature_service(entity_keys=["ticker"])
             try:
                 resp = svc.get([{"ticker": "GOOG"}, {"ticker": "MSFT"}])
                 print(resp)