mlrun 1.7.0rc9__py3-none-any.whl → 1.7.0rc12__py3-none-any.whl
This diff shows the changes between package versions as they were published to their public registries and is provided for informational purposes only.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/__init__.py +1 -0
- mlrun/artifacts/manager.py +17 -6
- mlrun/artifacts/model.py +29 -25
- mlrun/common/schemas/__init__.py +1 -0
- mlrun/common/schemas/alert.py +122 -0
- mlrun/common/schemas/auth.py +4 -0
- mlrun/common/schemas/client_spec.py +1 -0
- mlrun/common/schemas/model_monitoring/constants.py +3 -1
- mlrun/config.py +6 -3
- mlrun/datastore/__init__.py +4 -3
- mlrun/datastore/base.py +6 -5
- mlrun/datastore/sources.py +9 -4
- mlrun/datastore/targets.py +11 -3
- mlrun/datastore/utils.py +16 -0
- mlrun/datastore/v3io.py +27 -50
- mlrun/db/base.py +44 -2
- mlrun/db/httpdb.py +192 -20
- mlrun/db/nopdb.py +36 -1
- mlrun/execution.py +21 -14
- mlrun/feature_store/api.py +6 -3
- mlrun/feature_store/feature_set.py +39 -23
- mlrun/feature_store/feature_vector.py +2 -1
- mlrun/feature_store/steps.py +30 -19
- mlrun/features.py +4 -13
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +2 -2
- mlrun/frameworks/lgbm/__init__.py +1 -1
- mlrun/frameworks/lgbm/callbacks/callback.py +2 -4
- mlrun/frameworks/lgbm/model_handler.py +1 -1
- mlrun/frameworks/pytorch/__init__.py +2 -2
- mlrun/frameworks/sklearn/__init__.py +1 -1
- mlrun/frameworks/tf_keras/__init__.py +1 -1
- mlrun/frameworks/xgboost/__init__.py +1 -1
- mlrun/model.py +2 -2
- mlrun/model_monitoring/application.py +11 -2
- mlrun/model_monitoring/applications/histogram_data_drift.py +3 -3
- mlrun/model_monitoring/controller.py +2 -3
- mlrun/model_monitoring/stream_processing.py +0 -1
- mlrun/model_monitoring/writer.py +32 -0
- mlrun/package/packagers_manager.py +1 -0
- mlrun/platforms/__init__.py +1 -1
- mlrun/platforms/other.py +1 -1
- mlrun/projects/operations.py +11 -4
- mlrun/projects/project.py +168 -62
- mlrun/run.py +72 -40
- mlrun/runtimes/mpijob/abstract.py +8 -8
- mlrun/runtimes/nuclio/function.py +9 -5
- mlrun/runtimes/nuclio/serving.py +12 -14
- mlrun/runtimes/pod.py +3 -3
- mlrun/secrets.py +6 -2
- mlrun/serving/routers.py +3 -1
- mlrun/serving/states.py +9 -35
- mlrun/serving/v2_serving.py +4 -4
- mlrun/utils/helpers.py +1 -1
- mlrun/utils/notifications/notification/base.py +12 -0
- mlrun/utils/notifications/notification/console.py +2 -0
- mlrun/utils/notifications/notification/git.py +3 -1
- mlrun/utils/notifications/notification/ipython.py +2 -0
- mlrun/utils/notifications/notification/slack.py +41 -13
- mlrun/utils/notifications/notification/webhook.py +11 -1
- mlrun/utils/retryer.py +2 -2
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/METADATA +1 -1
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/RECORD +67 -67
- mlrun/datastore/helpers.py +0 -18
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc9.dist-info → mlrun-1.7.0rc12.dist-info}/top_level.txt +0 -0
@@ -345,17 +345,21 @@ class RemoteRuntime(KubeResource):

         git::

-            fn.with_source_archive(
-
-
+            fn.with_source_archive(
+                "git://github.com/org/repo#my-branch",
+                handler="main:handler",
+                workdir="path/inside/repo",
+            )

         s3::

             fn.spec.nuclio_runtime = "golang"
-            fn.with_source_archive(
+            fn.with_source_archive(
+                "s3://my-bucket/path/in/bucket/my-functions-archive",
                 handler="my_func:Handler",
                 workdir="path/inside/functions/archive",
-                runtime="golang"
+                runtime="golang",
+            )
         """
         self.spec.build.source = source
         # update handler in function_handler
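For context on the corrected docstring above, here is a hedged, self-contained sketch of the same call pattern; the function name, image, and archive details are illustrative placeholders, and only the runtime object is built locally (no deploy):

    import mlrun

    # Build a nuclio runtime object locally and point it at a git source
    # archive, mirroring the docstring example (placeholders throughout).
    fn = mlrun.new_function("archive-fn", kind="nuclio", image="mlrun/mlrun")
    fn.with_source_archive(
        "git://github.com/org/repo#my-branch",
        handler="main:handler",
        workdir="path/inside/repo",
    )
    # fn.deploy() would build and deploy against a live cluster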
mlrun/runtimes/nuclio/serving.py
CHANGED

@@ -23,7 +23,7 @@ from nuclio import KafkaTrigger

 import mlrun
 import mlrun.common.schemas
-from mlrun.datastore import parse_kafka_url
+from mlrun.datastore import get_kafka_brokers_from_dict, parse_kafka_url
 from mlrun.model import ObjectList
 from mlrun.runtimes.function_reference import FunctionReference
 from mlrun.secrets import SecretsStore
@@ -295,9 +295,7 @@ class ServingRuntime(RemoteRuntime):
                     "provided class is not a router step, must provide a router class in router topology"
                 )
             else:
-                step = RouterStep(
-                    class_name=class_name, class_args=class_args, engine=engine
-                )
+                step = RouterStep(class_name=class_name, class_args=class_args)
             self.spec.graph = step
         elif topology == StepKinds.flow:
             self.spec.graph = RootFlowStep(engine=engine)
@@ -367,8 +365,8 @@ class ServingRuntime(RemoteRuntime):

         Example, create a function (from the notebook), add a model class, and deploy::

-            fn = code_to_function(kind=
-            fn.add_model(
+            fn = code_to_function(kind="serving")
+            fn.add_model("boost", model_path, model_class="MyClass", my_arg=5)
             fn.deploy()

         only works with router topology, for nested topologies (model under router under flow)
@@ -450,7 +448,7 @@ class ServingRuntime(RemoteRuntime):

         example::

-            fn.add_child_function(
+            fn.add_child_function("enrich", "./enrich.ipynb", "mlrun/mlrun")

         :param name:   child function name
         :param url:    function/code url, support .py, .ipynb, .yaml extensions
@@ -489,11 +487,8 @@ class ServingRuntime(RemoteRuntime):
                 "worker_allocation_mode", "static"
             )

-            if (
-                stream.path.startswith("kafka://")
-                or "kafka_brokers" in stream.options
-            ):
-                brokers = stream.options.get("kafka_brokers")
+            brokers = get_kafka_brokers_from_dict(stream.options)
+            if stream.path.startswith("kafka://") or brokers:
                 if brokers:
                     brokers = brokers.split(",")
                 topic, brokers = parse_kafka_url(stream.path, brokers)
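The inline key checks are replaced by a call to `get_kafka_brokers_from_dict`. A rough, standalone approximation of what such a lookup could do, assuming it only consults the two broker keys visible in this diff (`kafka_brokers` and the older `kafka_bootstrap_servers`); the real helper in `mlrun/datastore/utils.py` may behave differently:

    from typing import Optional

    def get_kafka_brokers_from_dict(options: dict, pop: bool = False) -> Optional[str]:
        # Approximation only: prefer the newer "kafka_brokers" key, fall back
        # to "kafka_bootstrap_servers"; optionally remove the key from the dict.
        getter = options.pop if pop else options.get
        return getter("kafka_brokers", None) or getter("kafka_bootstrap_servers", None)

    # Caller-side pattern from the hunk above (sketch):
    options = {"kafka_brokers": "broker-1:9092,broker-2:9092"}
    brokers = get_kafka_brokers_from_dict(options)
    if brokers:
        brokers = brokers.split(",")  # -> ['broker-1:9092', 'broker-2:9092']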
@@ -731,8 +726,11 @@ class ServingRuntime(RemoteRuntime):
         example::

             serving_fn = mlrun.new_function("serving", image="mlrun/mlrun", kind="serving")
-            serving_fn.add_model(
-
+            serving_fn.add_model(
+                "my-classifier",
+                model_path=model_path,
+                class_name="mlrun.frameworks.sklearn.SklearnModelServer",
+            )
             serving_fn.plot(rankdir="LR")

         :param filename: target filepath for the image (None for the notebook)
mlrun/runtimes/pod.py
CHANGED

@@ -1278,9 +1278,9 @@ class KubeResource(BaseRuntime):
             from kubernetes import client as k8s_client

             security_context = k8s_client.V1SecurityContext(
-
-
-
+                run_as_user=1000,
+                run_as_group=3000,
+            )
             function.with_security_context(security_context)

         More info:
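A hedged, self-contained variant of the docstring example above; it only builds the runtime object locally, assumes the `kubernetes` client package is installed, and reuses the illustrative uid/gid values from the docstring:

    import mlrun
    from kubernetes import client as k8s_client

    # Create a job runtime locally and attach a pod security context,
    # as the KubeResource docstring above illustrates (sketch only).
    function = mlrun.new_function("secured-job", kind="job", image="mlrun/mlrun")
    security_context = k8s_client.V1SecurityContext(
        run_as_user=1000,
        run_as_group=3000,
    )
    function.with_security_context(security_context)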
mlrun/secrets.py
CHANGED

@@ -163,15 +163,19 @@ def get_secret_or_env(

     Example::

-        secrets = {
+        secrets = {"KEY1": "VALUE1"}
         secret = get_secret_or_env("KEY1", secret_provider=secrets)

+
         # Using a function to retrieve a secret
         def my_secret_provider(key):
             # some internal logic to retrieve secret
             return value

-
+
+        secret = get_secret_or_env(
+            "KEY1", secret_provider=my_secret_provider, default="TOO-MANY-SECRETS"
+        )

     :param key: Secret key to look for
     :param secret_provider: Dictionary, callable or `SecretsStore` to extract the secret value from. If using a
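A short runnable sketch of the documented lookup options, assuming only what the docstring above shows (a dict or callable provider plus the `default` fallback); the key names and values are placeholders:

    from mlrun import get_secret_or_env

    # Dict provider: the key is looked up directly in the mapping.
    secrets = {"KEY1": "VALUE1"}
    print(get_secret_or_env("KEY1", secret_provider=secrets))  # VALUE1

    # Callable provider: any key -> value function can be used instead.
    def my_secret_provider(key):
        return {"KEY1": "VALUE-FROM-FUNC"}.get(key)

    print(get_secret_or_env("KEY1", secret_provider=my_secret_provider))

    # `default` is returned when no provider or environment variable matches.
    print(get_secret_or_env("MISSING-KEY", default="TOO-MANY-SECRETS"))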
mlrun/serving/routers.py
CHANGED

@@ -272,7 +272,9 @@ class ParallelRun(BaseModelRouter):
         fn = mlrun.new_function("parallel", kind="serving")
         graph = fn.set_topology(
             "router",
-            mlrun.serving.routers.ParallelRun(
+            mlrun.serving.routers.ParallelRun(
+                extend_event=True, executor_type=executor
+            ),
         )
         graph.add_route("child1", class_name="Cls1")
         graph.add_route("child2", class_name="Cls2", my_arg={"c": 7})
mlrun/serving/states.py
CHANGED

@@ -25,7 +25,10 @@ import mlrun

 from ..config import config
 from ..datastore import get_stream_pusher
-from ..datastore.utils import parse_kafka_url
+from ..datastore.utils import (
+    get_kafka_brokers_from_dict,
+    parse_kafka_url,
+)
 from ..errors import MLRunInvalidArgumentError, err_to_str
 from ..model import ModelObj, ObjectDict
 from ..platforms.iguazio import parse_path
@@ -590,7 +593,7 @@ class RouterStep(TaskStep):

     kind = "router"
     default_shape = "doubleoctagon"
-    _dict_fields = _task_step_fields + ["routes"
+    _dict_fields = _task_step_fields + ["routes"]
     _default_class = "mlrun.serving.ModelRouter"

     def __init__(

@@ -603,7 +606,6 @@ class RouterStep(TaskStep):
         function: str = None,
         input_path: str = None,
         result_path: str = None,
-        engine: str = None,
     ):
         super().__init__(
             class_name,

@@ -616,8 +618,6 @@ class RouterStep(TaskStep):
         )
         self._routes: ObjectDict = None
         self.routes = routes
-        self.engine = engine
-        self._controller = None

     def get_children(self):
         """get child steps (routes)"""

@@ -687,33 +687,6 @@ class RouterStep(TaskStep):
         self._set_error_handler()
         self._post_init(mode)

-        if self.engine == "async":
-            self._build_async_flow()
-            self._run_async_flow()
-
-    def _build_async_flow(self):
-        """initialize and build the async/storey DAG"""
-
-        self.respond()
-        source, self._wait_for_result = _init_async_objects(self.context, [self])
-        source.to(self.async_object)
-
-        self._async_flow = source
-
-    def _run_async_flow(self):
-        self._controller = self._async_flow.run()
-
-    def run(self, event, *args, **kwargs):
-        if self._controller:
-            # async flow (using storey)
-            event._awaitable_result = None
-            resp = self._controller.emit(
-                event, return_awaitable_result=self._wait_for_result
-            )
-            return resp.await_result()
-
-        return super().run(event, *args, **kwargs)
-
     def __getitem__(self, name):
         return self._routes[name]

@@ -1524,13 +1497,14 @@ def _init_async_objects(context, steps):
             endpoint = None
             options = {}
             options.update(step.options)
-
+
+            kafka_brokers = get_kafka_brokers_from_dict(options, pop=True)
+
             if stream_path.startswith("kafka://") or kafka_brokers:
                 topic, brokers = parse_kafka_url(stream_path, kafka_brokers)

                 kafka_producer_options = options.pop(
-                    "kafka_producer_options",
-                    options.pop("kafka_bootstrap_servers", None),
+                    "kafka_producer_options", None
                 )

                 step._async_object = storey.KafkaTarget(
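The same centralization appears here: brokers are popped out of the step options before the rest are forwarded, and `kafka_producer_options` no longer doubles as a fallback for `kafka_bootstrap_servers`. A tiny pure-Python illustration of that option handling (all values are placeholders):

    options = {
        "kafka_brokers": "broker-1:9092",
        "kafka_producer_options": {"acks": "all"},
        "max_workers": 4,
    }

    # Pop the brokers first (the real call uses pop=True), then read the
    # producer options separately; whatever remains stays in `options`.
    kafka_brokers = options.pop("kafka_brokers", None)
    kafka_producer_options = options.pop("kafka_producer_options", None)
    assert options == {"max_workers": 4}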
mlrun/serving/v2_serving.py
CHANGED

@@ -63,11 +63,11 @@ class V2ModelServer(StepToDict):
         class MyClass(V2ModelServer):
             def load(self):
                 # load and initialize the model and/or other elements
-                model_file, extra_data = self.get_model(suffix=
+                model_file, extra_data = self.get_model(suffix=".pkl")
                 self.model = load(open(model_file, "rb"))

             def predict(self, request):
-                events = np.array(request[
+                events = np.array(request["inputs"])
                 dmatrix = xgb.DMatrix(events)
                 result: xgb.DMatrix = self.model.predict(dmatrix)
                 return {"outputs": result.tolist()}
@@ -176,9 +176,9 @@ class V2ModelServer(StepToDict):
         ::

             def load(self):
-                model_file, extra_data = self.get_model(suffix=
+                model_file, extra_data = self.get_model(suffix=".pkl")
                 self.model = load(open(model_file, "rb"))
-                categories = extra_data[
+                categories = extra_data["categories"].as_df()

         Parameters
         ----------
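The repaired docstring snippets above outline the `V2ModelServer` contract: `load()` resolves and deserializes the model, `predict()` maps a v2 request to a response. A slightly fuller hedged sketch of a custom server class, using pickle and numpy instead of the xgboost-specific calls in the docstring (class and file names are placeholders, not mlrun code):

    import pickle

    import numpy as np

    from mlrun.serving import V2ModelServer

    class MyPickleServer(V2ModelServer):
        def load(self):
            # get_model() resolves the model artifact to a local file path
            # plus any extra artifacts, as shown in the docstring above.
            model_file, extra_data = self.get_model(suffix=".pkl")
            with open(model_file, "rb") as f:
                self.model = pickle.load(f)

        def predict(self, request):
            # "inputs" / "outputs" follow the v2 request/response layout
            # used in the docstring example.
            events = np.array(request["inputs"])
            predictions = self.model.predict(events)
            return {"outputs": predictions.tolist()}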
mlrun/utils/helpers.py
CHANGED

mlrun/utils/notifications/notification/base.py
CHANGED

@@ -44,6 +44,8 @@ class NotificationBase:
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ):
         raise NotImplementedError()

@@ -61,6 +63,8 @@ class NotificationBase:
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ) -> str:
         if custom_html:
             return custom_html

@@ -68,6 +72,14 @@ class NotificationBase:
         if self.name:
             message = f"{self.name}: {message}"

+        if alert:
+            if not event_data:
+                return f"[{severity}] {message}"
+            return (
+                f"[{severity}] {message} for project {alert.project} "
+                f"UID {event_data.entity.id}. Value {event_data.value}"
+            )
+
         if not runs:
             return f"[{severity}] {message}"

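To make the new branch concrete, here is a minimal sketch of the message it renders, using simple stand-in objects in place of `AlertConfig` and `Event` (only the attributes referenced above are modeled; this is not mlrun code):

    from types import SimpleNamespace

    def format_message(message, severity, alert=None, event_data=None):
        # Mirrors the alert branch added to _get_html above.
        if alert:
            if not event_data:
                return f"[{severity}] {message}"
            return (
                f"[{severity}] {message} for project {alert.project} "
                f"UID {event_data.entity.id}. Value {event_data.value}"
            )
        return f"[{severity}] {message}"

    alert = SimpleNamespace(name="failed-job", project="my-project")
    event_data = SimpleNamespace(entity=SimpleNamespace(id="abc123"), value=0.92)
    print(format_message("job failed", "error", alert, event_data))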
mlrun/utils/notifications/notification/console.py
CHANGED

@@ -36,6 +36,8 @@ class ConsoleNotification(NotificationBase):
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ):
         severity = self._resolve_severity(severity)
         print(f"[{severity}] {message}")
mlrun/utils/notifications/notification/git.py
CHANGED

@@ -38,6 +38,8 @@ class GitNotification(NotificationBase):
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ):
         git_repo = self.params.get("repo", None)
         git_issue = self.params.get("issue", None)

@@ -50,7 +52,7 @@
         server = self.params.get("server", None)
         gitlab = self.params.get("gitlab", False)
         await self._pr_comment(
-            self._get_html(message, severity, runs, custom_html),
+            self._get_html(message, severity, runs, custom_html, alert, event_data),
             git_repo,
             git_issue,
             merge_request=git_merge_request,
mlrun/utils/notifications/notification/ipython.py
CHANGED

@@ -53,6 +53,8 @@ class IPythonNotification(NotificationBase):
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ):
         if not self._ipython:
             mlrun.utils.helpers.logger.debug(
mlrun/utils/notifications/notification/slack.py
CHANGED

@@ -42,6 +42,8 @@ class SlackNotification(NotificationBase):
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ):
         webhook = self.params.get("webhook", None) or mlrun.get_secret_or_env(
             "SLACK_WEBHOOK"

@@ -53,7 +55,7 @@
             )
             return

-        data = self._generate_slack_data(message, severity, runs)
+        data = self._generate_slack_data(message, severity, runs, alert, event_data)

         async with aiohttp.ClientSession() as session:
             async with session.post(webhook, json=data) as response:

@@ -66,12 +68,14 @@
             mlrun.common.schemas.NotificationSeverity, str
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ) -> dict:
         data = {
             "blocks": [
                 {
-                    "type": "
-                    "text":
+                    "type": "header",
+                    "text": {"type": "plain_text", "text": f"[{severity}] {message}"},
                 },
             ]
         }

@@ -80,22 +84,46 @@
                 {"type": "section", "text": self._get_slack_row(self.name)}
             )

-        if
-
+        if alert:
+            fields = self._get_alert_fields(alert, event_data)

-
-
+            for i in range(len(fields)):
+                data["blocks"].append({"type": "section", "text": fields[i]})
+        else:
+            if not runs:
+                return data
+
+            if isinstance(runs, list):
+                runs = mlrun.lists.RunList(runs)

-
-
-
-
+            fields = [self._get_slack_row("*Runs*"), self._get_slack_row("*Results*")]
+            for run in runs:
+                fields.append(self._get_run_line(run))
+                fields.append(self._get_run_result(run))

-
-
+            for i in range(0, len(fields), 8):
+                data["blocks"].append({"type": "section", "fields": fields[i : i + 8]})

         return data

+    def _get_alert_fields(
+        self,
+        alert: mlrun.common.schemas.AlertConfig,
+        event_data: mlrun.common.schemas.Event,
+    ) -> list:
+        line = [
+            self._get_slack_row(f":bell: {alert.name} alert has occurred"),
+            self._get_slack_row(f"*Project:*\n{alert.project}"),
+            self._get_slack_row(f"*UID:*\n{event_data.entity.id}"),
+        ]
+        if event_data.value is not None:
+            line.append(self._get_slack_row(f"*Event data:*\n{event_data.value}"))
+
+        if url := mlrun.utils.helpers.get_ui_url(alert.project, event_data.entity.id):
+            line.append(self._get_slack_row(f"*Overview:*\n<{url}|*Job overview*>"))
+
+        return line
+
     def _get_run_line(self, run: dict) -> dict:
         meta = run["metadata"]
         url = mlrun.utils.helpers.get_ui_url(meta.get("project"), meta.get("uid"))
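The Slack payload built above is ordinary Block Kit JSON: a header block followed by section blocks, with run fields chunked in groups of eight. A standalone sketch of that structure (the chunk size and block layout follow the hunk above; the row-building here is a stand-in, not the mlrun implementation):

    def build_slack_payload(message, severity, rows):
        # Header block plus section blocks of up to 8 markdown fields each,
        # matching the shape of _generate_slack_data above.
        data = {
            "blocks": [
                {
                    "type": "header",
                    "text": {"type": "plain_text", "text": f"[{severity}] {message}"},
                },
            ]
        }
        fields = [{"type": "mrkdwn", "text": row} for row in rows]
        for i in range(0, len(fields), 8):
            data["blocks"].append({"type": "section", "fields": fields[i : i + 8]})
        return data

    payload = build_slack_payload("run completed", "info", ["*Runs*", "*Results*"])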
mlrun/utils/notifications/notification/webhook.py
CHANGED

@@ -36,6 +36,8 @@ class WebhookNotification(NotificationBase):
         ] = mlrun.common.schemas.NotificationSeverity.INFO,
         runs: typing.Union[mlrun.lists.RunList, list] = None,
         custom_html: str = None,
+        alert: mlrun.common.schemas.AlertConfig = None,
+        event_data: mlrun.common.schemas.Event = None,
     ):
         url = self.params.get("url", None)
         method = self.params.get("method", "post").lower()

@@ -46,9 +48,17 @@
         request_body = {
             "message": message,
             "severity": severity,
-            "runs": runs,
         }

+        if runs:
+            request_body["runs"] = runs
+
+        if alert:
+            request_body["alert"] = alert.dict()
+        if event_data:
+            request_body["value"] = event_data.value
+            request_body["id"] = event_data.entity.id
+
         if custom_html:
             request_body["custom_html"] = custom_html

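The webhook body is now built incrementally, including the optional parts only when they are present. A hedged sketch of the resulting payload shape (stand-in objects again; the real notification serializes the alert with `alert.dict()` and also honors `url`, `method`, and `custom_html` from its params):

    from types import SimpleNamespace

    def build_webhook_body(message, severity, runs=None, alert=None, event_data=None):
        # Simplified version of the request_body construction above.
        body = {"message": message, "severity": severity}
        if runs:
            body["runs"] = runs
        if alert:
            body["alert"] = {"name": alert.name, "project": alert.project}
        if event_data:
            body["value"] = event_data.value
            body["id"] = event_data.entity.id
        return body

    alert = SimpleNamespace(name="drift-detected", project="my-project")
    event_data = SimpleNamespace(entity=SimpleNamespace(id="ep-1"), value=0.31)
    print(build_webhook_body("alert fired", "warning", alert=alert, event_data=event_data))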
mlrun/utils/retryer.py
CHANGED

@@ -117,7 +117,7 @@ class Retryer:
         self._raise_last_exception()

     def _prepare(self):
-        self.start_time = time.
+        self.start_time = time.monotonic()
         self.last_exception = None

         # Check if backoff is just a simple interval

@@ -173,7 +173,7 @@
         ) from self.last_exception

     def _timeout_exceeded(self, next_interval=None):
-        now = time.
+        now = time.monotonic()
         if next_interval:
             now = now + next_interval
         return self.timeout is not None and now >= self.start_time + self.timeout
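Measuring the retry deadline with `time.monotonic()` keeps it immune to wall-clock adjustments (NTP steps, DST, manual changes), since the monotonic clock only moves forward. A minimal sketch of the same deadline pattern outside mlrun:

    import time

    def retry_with_timeout(func, timeout, interval=0.5):
        # Deadline measured on the monotonic clock, as in Retryer._prepare /
        # _timeout_exceeded above (sketch only, not the mlrun Retryer).
        start_time = time.monotonic()
        last_exception = None
        while time.monotonic() - start_time < timeout:
            try:
                return func()
            except Exception as exc:
                last_exception = exc
                time.sleep(interval)
        raise TimeoutError("retries exhausted") from last_exception

    # retry_with_timeout(lambda: possibly_flaky_call(), timeout=5)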
mlrun/utils/version/version.json
CHANGED