mlrun 1.7.0rc14__py3-none-any.whl → 1.7.0rc21__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mlrun/__init__.py +10 -1
- mlrun/__main__.py +23 -111
- mlrun/alerts/__init__.py +15 -0
- mlrun/alerts/alert.py +144 -0
- mlrun/api/schemas/__init__.py +4 -3
- mlrun/artifacts/__init__.py +8 -3
- mlrun/artifacts/base.py +36 -253
- mlrun/artifacts/dataset.py +9 -190
- mlrun/artifacts/manager.py +46 -42
- mlrun/artifacts/model.py +9 -141
- mlrun/artifacts/plots.py +14 -375
- mlrun/common/constants.py +65 -3
- mlrun/common/formatters/__init__.py +19 -0
- mlrun/{runtimes/mpijob/v1alpha1.py → common/formatters/artifact.py} +6 -14
- mlrun/common/formatters/base.py +113 -0
- mlrun/common/formatters/function.py +46 -0
- mlrun/common/formatters/pipeline.py +53 -0
- mlrun/common/formatters/project.py +51 -0
- mlrun/{runtimes → common/runtimes}/constants.py +32 -4
- mlrun/common/schemas/__init__.py +10 -5
- mlrun/common/schemas/alert.py +92 -11
- mlrun/common/schemas/api_gateway.py +56 -0
- mlrun/common/schemas/artifact.py +15 -5
- mlrun/common/schemas/auth.py +2 -0
- mlrun/common/schemas/client_spec.py +1 -0
- mlrun/common/schemas/frontend_spec.py +1 -0
- mlrun/common/schemas/function.py +4 -0
- mlrun/common/schemas/model_monitoring/__init__.py +15 -3
- mlrun/common/schemas/model_monitoring/constants.py +58 -7
- mlrun/common/schemas/model_monitoring/grafana.py +9 -5
- mlrun/common/schemas/model_monitoring/model_endpoints.py +86 -2
- mlrun/common/schemas/pipeline.py +0 -9
- mlrun/common/schemas/project.py +5 -11
- mlrun/common/types.py +1 -0
- mlrun/config.py +27 -9
- mlrun/data_types/to_pandas.py +9 -9
- mlrun/datastore/base.py +41 -9
- mlrun/datastore/datastore.py +6 -2
- mlrun/datastore/datastore_profile.py +56 -4
- mlrun/datastore/inmem.py +2 -2
- mlrun/datastore/redis.py +2 -2
- mlrun/datastore/s3.py +5 -0
- mlrun/datastore/sources.py +147 -7
- mlrun/datastore/store_resources.py +7 -7
- mlrun/datastore/targets.py +110 -42
- mlrun/datastore/utils.py +42 -0
- mlrun/db/base.py +54 -10
- mlrun/db/httpdb.py +282 -79
- mlrun/db/nopdb.py +52 -10
- mlrun/errors.py +11 -0
- mlrun/execution.py +24 -9
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +12 -47
- mlrun/feature_store/feature_set.py +9 -0
- mlrun/feature_store/feature_vector.py +8 -0
- mlrun/feature_store/ingestion.py +7 -6
- mlrun/feature_store/retrieval/base.py +9 -4
- mlrun/feature_store/retrieval/conversion.py +9 -9
- mlrun/feature_store/retrieval/dask_merger.py +2 -0
- mlrun/feature_store/retrieval/job.py +9 -3
- mlrun/feature_store/retrieval/local_merger.py +2 -0
- mlrun/feature_store/retrieval/spark_merger.py +16 -0
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +7 -12
- mlrun/frameworks/parallel_coordinates.py +2 -1
- mlrun/frameworks/tf_keras/__init__.py +4 -1
- mlrun/k8s_utils.py +10 -11
- mlrun/launcher/base.py +4 -3
- mlrun/launcher/client.py +5 -3
- mlrun/launcher/local.py +8 -2
- mlrun/launcher/remote.py +8 -2
- mlrun/lists.py +6 -2
- mlrun/model.py +45 -21
- mlrun/model_monitoring/__init__.py +1 -1
- mlrun/model_monitoring/api.py +41 -18
- mlrun/model_monitoring/application.py +5 -305
- mlrun/model_monitoring/applications/__init__.py +11 -0
- mlrun/model_monitoring/applications/_application_steps.py +157 -0
- mlrun/model_monitoring/applications/base.py +280 -0
- mlrun/model_monitoring/applications/context.py +214 -0
- mlrun/model_monitoring/applications/evidently_base.py +211 -0
- mlrun/model_monitoring/applications/histogram_data_drift.py +132 -91
- mlrun/model_monitoring/applications/results.py +99 -0
- mlrun/model_monitoring/controller.py +3 -1
- mlrun/model_monitoring/db/__init__.py +2 -0
- mlrun/model_monitoring/db/stores/__init__.py +0 -2
- mlrun/model_monitoring/db/stores/base/store.py +22 -37
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +39 -8
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +27 -7
- mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +246 -224
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +232 -216
- mlrun/model_monitoring/db/tsdb/__init__.py +100 -0
- mlrun/model_monitoring/db/tsdb/base.py +329 -0
- mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
- mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +397 -0
- mlrun/model_monitoring/db/tsdb/v3io/__init__.py +15 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +117 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +636 -0
- mlrun/model_monitoring/evidently_application.py +6 -118
- mlrun/model_monitoring/helpers.py +46 -1
- mlrun/model_monitoring/model_endpoint.py +3 -2
- mlrun/model_monitoring/stream_processing.py +57 -216
- mlrun/model_monitoring/writer.py +134 -124
- mlrun/package/utils/_formatter.py +2 -2
- mlrun/platforms/__init__.py +10 -9
- mlrun/platforms/iguazio.py +21 -202
- mlrun/projects/operations.py +19 -12
- mlrun/projects/pipelines.py +79 -102
- mlrun/projects/project.py +265 -103
- mlrun/render.py +15 -14
- mlrun/run.py +16 -46
- mlrun/runtimes/__init__.py +6 -3
- mlrun/runtimes/base.py +8 -7
- mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
- mlrun/runtimes/funcdoc.py +0 -28
- mlrun/runtimes/kubejob.py +2 -1
- mlrun/runtimes/local.py +5 -2
- mlrun/runtimes/mpijob/__init__.py +0 -20
- mlrun/runtimes/mpijob/v1.py +1 -1
- mlrun/runtimes/nuclio/api_gateway.py +194 -84
- mlrun/runtimes/nuclio/application/application.py +170 -8
- mlrun/runtimes/nuclio/function.py +39 -49
- mlrun/runtimes/pod.py +16 -36
- mlrun/runtimes/remotesparkjob.py +9 -3
- mlrun/runtimes/sparkjob/spark3job.py +1 -1
- mlrun/runtimes/utils.py +6 -45
- mlrun/serving/server.py +2 -1
- mlrun/serving/v2_serving.py +5 -1
- mlrun/track/tracker.py +2 -1
- mlrun/utils/async_http.py +25 -5
- mlrun/utils/helpers.py +107 -75
- mlrun/utils/logger.py +39 -7
- mlrun/utils/notifications/notification/__init__.py +14 -9
- mlrun/utils/notifications/notification/base.py +1 -1
- mlrun/utils/notifications/notification/slack.py +34 -7
- mlrun/utils/notifications/notification/webhook.py +1 -1
- mlrun/utils/notifications/notification_pusher.py +147 -16
- mlrun/utils/regex.py +9 -0
- mlrun/utils/v3io_clients.py +0 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/METADATA +14 -6
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/RECORD +150 -130
- mlrun/kfpops.py +0 -865
- mlrun/platforms/other.py +0 -305
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/top_level.txt +0 -0

mlrun/utils/notifications/notification/slack.py CHANGED

@@ -32,6 +32,7 @@ class SlackNotification(NotificationBase):
         "completed": ":smiley:",
         "running": ":man-running:",
         "error": ":x:",
+        "skipped": ":zzz:",
     }

     async def push(
@@ -135,8 +136,16 @@ class SlackNotification(NotificationBase):
         line = [
             self._get_slack_row(f":bell: {alert.name} alert has occurred"),
             self._get_slack_row(f"*Project:*\n{alert.project}"),
-            self._get_slack_row(f"*
+            self._get_slack_row(f"*ID:*\n{event_data.entity.ids[0]}"),
         ]
+
+        if alert.summary:
+            line.append(
+                self._get_slack_row(
+                    f"*Summary:*\n{mlrun.utils.helpers.format_alert_summary(alert, event_data)}"
+                )
+            )
+
         if event_data.value_dict:
             data_lines = []
             for key, value in event_data.value_dict.items():
@@ -144,32 +153,50 @@ class SlackNotification(NotificationBase):
             data_text = "\n".join(data_lines)
             line.append(self._get_slack_row(f"*Event data:*\n{data_text}"))

-        if
-
+        if (
+            event_data.entity.kind == mlrun.common.schemas.alert.EventEntityKind.JOB
+        ):  # JOB entity
+            uid = event_data.value_dict.get("uid")
+            url = mlrun.utils.helpers.get_ui_url(alert.project, uid)
+            overview_type = "Job overview"
+        else:  # MODEL entity
+            model_name = event_data.value_dict.get("model")
+            model_endpoint_id = event_data.value_dict.get("model_endpoint_id")
+            url = mlrun.utils.helpers.get_model_endpoint_url(
+                alert.project, model_name, model_endpoint_id
+            )
+            overview_type = "Model endpoint"
+
+        line.append(self._get_slack_row(f"*Overview:*\n<{url}|*{overview_type}*>"))

         return line

     def _get_run_line(self, run: dict) -> dict:
         meta = run["metadata"]
         url = mlrun.utils.helpers.get_ui_url(meta.get("project"), meta.get("uid"))
-
+
+        # Only show the URL if the run is not a function (serving or mlrun function)
+        kind = run.get("step_kind")
+        state = run["status"].get("state", "")
+        if state != "skipped" and (url and not kind or kind == "run"):
             line = f'<{url}|*{meta.get("name")}*>'
         else:
             line = meta.get("name")
-
+        if kind:
+            line = f'{line} *({run.get("step_kind", run.get("kind", ""))})*'
         line = f'{self.emojis.get(state, ":question:")} {line}'
         return self._get_slack_row(line)

     def _get_run_result(self, run: dict) -> dict:
         state = run["status"].get("state", "")
         if state == "error":
-            error_status = run["status"].get("error", "")
+            error_status = run["status"].get("error", "") or state
             result = f"*{error_status}*"
         else:
             result = mlrun.utils.helpers.dict_to_str(
                 run["status"].get("results", {}), ", "
             )
-        return self._get_slack_row(result or
+        return self._get_slack_row(result or state)

     @staticmethod
     def _get_slack_row(text: str) -> dict:
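
A note on the new link condition in _get_run_line: Python precedence (not binds tighter than and, which binds tighter than or) makes "url and not kind or kind == 'run'" group as "(url and not kind) or (kind == 'run')". A minimal illustrative sketch, with hypothetical values standing in for the locals from the diff:

    # Hypothetical values for the locals used in _get_run_line (not from the diff)
    state, url, kind = "completed", "https://mlrun.example.com/projects/demo/jobs", "run"

    # Condition as written in the diff
    as_written = state != "skipped" and (url and not kind or kind == "run")
    # Same condition with explicit grouping
    explicit = state != "skipped" and ((url and not kind) or kind == "run")

    assert bool(as_written) == bool(explicit)  # both truthy here, since kind == "run"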

mlrun/utils/notifications/notification/webhook.py CHANGED

@@ -57,7 +57,7 @@ class WebhookNotification(NotificationBase):
             request_body["alert"] = alert.dict()
             if event_data:
                 request_body["value"] = event_data.value_dict
-                request_body["id"] = event_data.entity.
+                request_body["id"] = event_data.entity.ids[0]

         if custom_html:
             request_body["custom_html"] = custom_html
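
For context, a rough sketch of the webhook payload after this change, assuming an alert event with a single entity id. The keys mirror the assignments in WebhookNotification.push; the values below are placeholders, not taken from the diff:

    # Illustrative payload only; real values come from alert.dict() and event_data at runtime
    request_body = {
        "alert": {"name": "data-drift-detected", "project": "demo"},  # alert.dict()
        "value": {"model": "my-model", "model_endpoint_id": "ep-1"},  # event_data.value_dict
        "id": "ep-1",  # event_data.entity.ids[0]
    }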

mlrun/utils/notifications/notification_pusher.py CHANGED

@@ -15,10 +15,17 @@
 import asyncio
 import datetime
 import os
+import re
 import traceback
 import typing
 from concurrent.futures import ThreadPoolExecutor

+import kfp
+import mlrun_pipelines.common.ops
+import mlrun_pipelines.models
+
+import mlrun.common.constants as mlrun_constants
+import mlrun.common.runtimes.constants
 import mlrun.common.schemas
 import mlrun.config
 import mlrun.db.base
@@ -233,25 +240,12 @@ class NotificationPusher(_NotificationPusherBase):
         resource = "Run"
         runs = [run.to_dict()]

-        if
-            resource =
+        if mlrun_constants.MLRunInternalLabels.workflow in run.metadata.labels:
+            resource = mlrun_constants.MLRunInternalLabels.workflow
             custom_message = (
                 f" (workflow: {run.metadata.labels['workflow']}){custom_message}"
             )
-
-
-            workflow_id = run.status.results.get("workflow_id", None)
-            if workflow_id:
-                workflow_runs = db.list_runs(
-                    project=run.metadata.project,
-                    labels=f"workflow={workflow_id}",
-                )
-                logger.debug(
-                    "Found workflow runs, extending notification runs",
-                    workflow_id=workflow_id,
-                    workflow_runs_amount=len(workflow_runs),
-                )
-                runs.extend(workflow_runs)
+            runs.extend(self.get_workflow_steps(run))

         message = (
             self.messages.get(run.state(), "").format(resource=resource)
@@ -395,6 +389,137 @@ class NotificationPusher(_NotificationPusherBase):
             mask_params=False,
         )

+    def get_workflow_steps(self, run: mlrun.model.RunObject) -> list:
+        steps = []
+        db = mlrun.get_run_db()
+
+        def _add_run_step(_step: mlrun_pipelines.models.PipelineStep):
+            try:
+                _run = db.list_runs(
+                    project=run.metadata.project,
+                    labels=f"mlrun_constants.MLRunInternalLabels.runner_pod={_step.node_name}",
+                )[0]
+            except IndexError:
+                _run = {
+                    "metadata": {
+                        "name": _step.display_name,
+                        "project": run.metadata.project,
+                    },
+                }
+            _run["step_kind"] = _step.step_type
+            if _step.skipped:
+                _run.setdefault("status", {})["state"] = (
+                    mlrun.common.runtimes.constants.RunStates.skipped
+                )
+            steps.append(_run)
+
+        def _add_deploy_function_step(_step: mlrun_pipelines.models.PipelineStep):
+            project, name, hash_key = self._extract_function_uri(
+                _step.get_annotation("mlrun/function-uri")
+            )
+            if name:
+                try:
+                    function = db.get_function(
+                        project=project, name=name, hash_key=hash_key
+                    )
+                except mlrun.errors.MLRunNotFoundError:
+                    # If the function is not found (if build failed for example), we will create a dummy
+                    # function object for the notification to display the function name
+                    function = {
+                        "metadata": {
+                            "name": name,
+                            "project": project,
+                            "hash_key": hash_key,
+                        },
+                    }
+                pod_phase = _step.phase
+                if _step.skipped:
+                    state = mlrun.common.schemas.FunctionState.skipped
+                else:
+                    state = mlrun.common.runtimes.constants.PodPhases.pod_phase_to_run_state(
+                        pod_phase
+                    )
+                function["status"] = {"state": state}
+                if isinstance(function["metadata"].get("updated"), datetime.datetime):
+                    function["metadata"]["updated"] = function["metadata"][
+                        "updated"
+                    ].isoformat()
+                function["step_kind"] = _step.step_type
+                steps.append(function)
+
+        step_methods = {
+            mlrun_pipelines.common.ops.PipelineRunType.run: _add_run_step,
+            mlrun_pipelines.common.ops.PipelineRunType.build: _add_deploy_function_step,
+            mlrun_pipelines.common.ops.PipelineRunType.deploy: _add_deploy_function_step,
+        }
+
+        workflow_id = run.status.results.get("workflow_id", None)
+        if not workflow_id:
+            return steps
+
+        workflow_manifest = self._get_workflow_manifest(workflow_id)
+        if not workflow_manifest:
+            return steps
+
+        try:
+            for step in workflow_manifest.get_steps():
+                step_method = step_methods.get(step.step_type)
+                if step_method:
+                    step_method(step)
+            return steps
+        except Exception:
+            # If we fail to read the pipeline steps, we will return the list of runs that have the same workflow id
+            logger.warning(
+                "Failed to extract workflow steps from workflow manifest, "
+                "returning all runs with the workflow id label",
+                workflow_id=workflow_id,
+                traceback=traceback.format_exc(),
+            )
+            return db.list_runs(
+                project=run.metadata.project,
+                labels=f"workflow={workflow_id}",
+            )
+
+    @staticmethod
+    def _get_workflow_manifest(
+        workflow_id: str,
+    ) -> typing.Optional[mlrun_pipelines.models.PipelineManifest]:
+        kfp_url = mlrun.mlconf.resolve_kfp_url(mlrun.mlconf.namespace)
+        if not kfp_url:
+            raise mlrun.errors.MLRunNotFoundError(
+                "KubeFlow Pipelines is not configured"
+            )
+
+        kfp_client = kfp.Client(host=kfp_url)
+
+        # arbitrary timeout of 5 seconds, the workflow should be done by now
+        kfp_run = kfp_client.wait_for_run_completion(workflow_id, 5)
+        if not kfp_run:
+            return None
+
+        kfp_run = mlrun_pipelines.models.PipelineRun(kfp_run)
+        return kfp_run.workflow_manifest()
+
+    def _extract_function_uri(self, function_uri: str) -> tuple[str, str, str]:
+        """
+        Extract the project, name, and hash key from a function uri.
+        Examples:
+        - "project/name@hash_key" returns project, name, hash_key
+        - "project/name" returns project, name, ""
+        """
+        project, name, hash_key = None, None, None
+        hashed_pattern = r"^(.+)/(.+)@(.+)$"
+        pattern = r"^(.+)/(.+)$"
+        match = re.match(hashed_pattern, function_uri)
+        if match:
+            project, name, hash_key = match.groups()
+        else:
+            match = re.match(pattern, function_uri)
+            if match:
+                project, name = match.groups()
+                hash_key = ""
+        return project, name, hash_key
+

 class CustomNotificationPusher(_NotificationPusherBase):
     def __init__(self, notification_types: list[str] = None):
@@ -413,6 +538,12 @@ class CustomNotificationPusher(_NotificationPusherBase):
             if notification.is_async
         }

+    @property
+    def notifications(self):
+        notifications = self._sync_notifications.copy()
+        notifications.update(self._async_notifications)
+        return notifications
+
     def push(
         self,
         message: str,
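
A quick illustrative check of the function-uri parsing introduced above, using the same regexes as _extract_function_uri; the URIs are made-up examples:

    import re

    hashed_pattern = r"^(.+)/(.+)@(.+)$"
    pattern = r"^(.+)/(.+)$"

    # "project/name@hash_key" -> (project, name, hash_key)
    assert re.match(hashed_pattern, "demo/trainer@abc123").groups() == ("demo", "trainer", "abc123")
    # "project/name" -> (project, name); the hash key falls back to ""
    assert re.match(pattern, "demo/trainer").groups() == ("demo", "trainer")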

mlrun/utils/regex.py CHANGED

@@ -92,3 +92,12 @@ artifact_key = [r"[^\/\\]+$"]
 # must be alphanumeric or _
 # max 256 length
 v3io_stream_consumer_group = [r"^(?!_)[a-zA-Z0-9_]{1,256}$"]
+
+# URI patterns
+run_uri_pattern = r"^(?P<project>.*)@(?P<uid>.*)\#(?P<iteration>.*?)(:(?P<tag>.*))?$"
+
+artifact_uri_pattern = r"^((?P<project>.*)/)?(?P<key>.*?)(\#(?P<iteration>.*?))?(:(?P<tag>.*?))?(@(?P<tree>.*))?$"
+
+artifact_producer_uri_pattern = (
+    r"^((?P<project>.*)/)?(?P<uid>.*?)(\-(?P<iteration>.*?))?$"
+)
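
A small illustrative use of the new named-group patterns; the artifact URI below is a made-up example:

    import re

    artifact_uri_pattern = (
        r"^((?P<project>.*)/)?(?P<key>.*?)(\#(?P<iteration>.*?))?"
        r"(:(?P<tag>.*?))?(@(?P<tree>.*))?$"
    )

    match = re.match(artifact_uri_pattern, "demo/model-report#0:latest@tree123")
    print(match.groupdict())
    # {'project': 'demo', 'key': 'model-report', 'iteration': '0',
    #  'tag': 'latest', 'tree': 'tree123'}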

mlrun/utils/v3io_clients.py CHANGED

@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-#

 from v3io.dataplane import Client as V3IOClient
 from v3io_frames import Client as get_client

mlrun/utils/version/version.json CHANGED

{mlrun-1.7.0rc14.dist-info → mlrun-1.7.0rc21.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: mlrun
-Version: 1.7.0rc14
+Version: 1.7.0rc21
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -26,10 +26,9 @@ Requires-Dist: GitPython >=3.1.41,~=3.1
 Requires-Dist: aiohttp ~=3.9
 Requires-Dist: aiohttp-retry ~=2.8
 Requires-Dist: click ~=8.1
-Requires-Dist: kfp ~=1.8
 Requires-Dist: nest-asyncio ~=1.0
 Requires-Dist: ipython ~=8.10
-Requires-Dist: nuclio-jupyter ~=0.9.
+Requires-Dist: nuclio-jupyter ~=0.9.17
 Requires-Dist: numpy <1.27.0,>=1.16.5
 Requires-Dist: pandas <2.2,>=1.2
 Requires-Dist: pyarrow <15,>=10.0
@@ -44,13 +43,15 @@ Requires-Dist: semver ~=3.0
 Requires-Dist: dependency-injector ~=4.41
 Requires-Dist: fsspec <2024.4,>=2023.9.2
 Requires-Dist: v3iofs ~=0.1.17
-Requires-Dist: storey ~=1.7.
+Requires-Dist: storey ~=1.7.17
 Requires-Dist: inflection ~=0.5.0
 Requires-Dist: python-dotenv ~=0.17.0
 Requires-Dist: setuptools ~=69.1
 Requires-Dist: deprecated ~=1.2
 Requires-Dist: jinja2 >=3.1.3,~=3.1
 Requires-Dist: orjson <4,>=3.9.15
+Requires-Dist: mlrun-pipelines-kfp-common >0.1.0,~=0.1.1
+Requires-Dist: mlrun-pipelines-kfp-v1-8 >0.1.0,~=0.1.1
 Provides-Extra: alibaba-oss
 Requires-Dist: ossfs ==2023.12.0 ; extra == 'alibaba-oss'
 Requires-Dist: oss2 ==2.18.1 ; extra == 'alibaba-oss'
@@ -81,18 +82,20 @@ Requires-Dist: pyopenssl >=23 ; extra == 'all'
 Requires-Dist: redis ~=4.3 ; extra == 'all'
 Requires-Dist: s3fs <2024.4,>=2023.9.2 ; extra == 'all'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'all'
+Requires-Dist: taos-ws-py ~=0.3.2 ; extra == 'all'
 Provides-Extra: api
 Requires-Dist: uvicorn ~=0.27.1 ; extra == 'api'
 Requires-Dist: dask-kubernetes ~=0.11.0 ; extra == 'api'
 Requires-Dist: apscheduler <4,>=3.10.3 ; extra == 'api'
 Requires-Dist: objgraph ~=3.6 ; extra == 'api'
-Requires-Dist: igz-mgmt ~=0.
+Requires-Dist: igz-mgmt ~=0.2.0 ; extra == 'api'
 Requires-Dist: humanfriendly ~=10.0 ; extra == 'api'
 Requires-Dist: fastapi ~=0.110.0 ; extra == 'api'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'api'
 Requires-Dist: pymysql ~=1.0 ; extra == 'api'
 Requires-Dist: alembic ~=1.9 ; extra == 'api'
 Requires-Dist: timelength ~=1.1 ; extra == 'api'
+Requires-Dist: memray ~=1.12 ; extra == 'api'
 Provides-Extra: azure-blob-storage
 Requires-Dist: msrest ~=0.6.21 ; extra == 'azure-blob-storage'
 Requires-Dist: azure-core ~=1.24 ; extra == 'azure-blob-storage'
@@ -128,6 +131,7 @@ Requires-Dist: pyopenssl >=23 ; extra == 'complete'
 Requires-Dist: redis ~=4.3 ; extra == 'complete'
 Requires-Dist: s3fs <2024.4,>=2023.9.2 ; extra == 'complete'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'complete'
+Requires-Dist: taos-ws-py ~=0.3.2 ; extra == 'complete'
 Provides-Extra: complete-api
 Requires-Dist: adlfs ==2023.9.0 ; extra == 'complete-api'
 Requires-Dist: aiobotocore <2.8,>=2.5.0 ; extra == 'complete-api'
@@ -147,8 +151,9 @@ Requires-Dist: gcsfs <2024.4,>=2023.9.2 ; extra == 'complete-api'
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas] ==3.14.1 ; extra == 'complete-api'
 Requires-Dist: graphviz ~=0.20.0 ; extra == 'complete-api'
 Requires-Dist: humanfriendly ~=10.0 ; extra == 'complete-api'
-Requires-Dist: igz-mgmt ~=0.
+Requires-Dist: igz-mgmt ~=0.2.0 ; extra == 'complete-api'
 Requires-Dist: kafka-python ~=2.0 ; extra == 'complete-api'
+Requires-Dist: memray ~=1.12 ; extra == 'complete-api'
 Requires-Dist: mlflow ~=2.8 ; extra == 'complete-api'
 Requires-Dist: msrest ~=0.6.21 ; extra == 'complete-api'
 Requires-Dist: objgraph ~=3.6 ; extra == 'complete-api'
@@ -160,6 +165,7 @@ Requires-Dist: pyopenssl >=23 ; extra == 'complete-api'
 Requires-Dist: redis ~=4.3 ; extra == 'complete-api'
 Requires-Dist: s3fs <2024.4,>=2023.9.2 ; extra == 'complete-api'
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'complete-api'
+Requires-Dist: taos-ws-py ~=0.3.2 ; extra == 'complete-api'
 Requires-Dist: timelength ~=1.1 ; extra == 'complete-api'
 Requires-Dist: uvicorn ~=0.27.1 ; extra == 'complete-api'
 Provides-Extra: dask
@@ -192,6 +198,8 @@ Requires-Dist: aiobotocore <2.8,>=2.5.0 ; extra == 's3'
 Requires-Dist: s3fs <2024.4,>=2023.9.2 ; extra == 's3'
 Provides-Extra: sqlalchemy
 Requires-Dist: sqlalchemy ~=1.4 ; extra == 'sqlalchemy'
+Provides-Extra: tdengine
+Requires-Dist: taos-ws-py ~=0.3.2 ; extra == 'tdengine'

 <a id="top"></a>
 [](https://github.com/mlrun/mlrun/actions/workflows/build.yaml?query=branch%3Adevelopment)