mlrun 1.4.0rc25__py3-none-any.whl → 1.5.0rc2__py3-none-any.whl

This diff compares publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (184)
  1. mlrun/__init__.py +2 -35
  2. mlrun/__main__.py +3 -41
  3. mlrun/api/api/api.py +6 -0
  4. mlrun/api/api/endpoints/feature_store.py +0 -4
  5. mlrun/api/api/endpoints/files.py +14 -2
  6. mlrun/api/api/endpoints/frontend_spec.py +2 -1
  7. mlrun/api/api/endpoints/functions.py +95 -59
  8. mlrun/api/api/endpoints/grafana_proxy.py +9 -9
  9. mlrun/api/api/endpoints/logs.py +17 -3
  10. mlrun/api/api/endpoints/model_endpoints.py +3 -2
  11. mlrun/api/api/endpoints/pipelines.py +1 -5
  12. mlrun/api/api/endpoints/projects.py +88 -0
  13. mlrun/api/api/endpoints/runs.py +48 -6
  14. mlrun/api/api/endpoints/submit.py +2 -1
  15. mlrun/api/api/endpoints/workflows.py +355 -0
  16. mlrun/api/api/utils.py +3 -4
  17. mlrun/api/crud/__init__.py +1 -0
  18. mlrun/api/crud/client_spec.py +6 -2
  19. mlrun/api/crud/feature_store.py +5 -0
  20. mlrun/api/crud/model_monitoring/__init__.py +1 -0
  21. mlrun/api/crud/model_monitoring/deployment.py +497 -0
  22. mlrun/api/crud/model_monitoring/grafana.py +96 -42
  23. mlrun/api/crud/model_monitoring/helpers.py +159 -0
  24. mlrun/api/crud/model_monitoring/model_endpoints.py +202 -476
  25. mlrun/api/crud/notifications.py +9 -4
  26. mlrun/api/crud/pipelines.py +6 -11
  27. mlrun/api/crud/projects.py +2 -2
  28. mlrun/api/crud/runtime_resources.py +4 -3
  29. mlrun/api/crud/runtimes/nuclio/helpers.py +5 -1
  30. mlrun/api/crud/secrets.py +21 -0
  31. mlrun/api/crud/workflows.py +352 -0
  32. mlrun/api/db/base.py +16 -1
  33. mlrun/api/db/init_db.py +2 -4
  34. mlrun/api/db/session.py +1 -1
  35. mlrun/api/db/sqldb/db.py +129 -31
  36. mlrun/api/db/sqldb/models/models_mysql.py +15 -1
  37. mlrun/api/db/sqldb/models/models_sqlite.py +16 -2
  38. mlrun/api/launcher.py +38 -6
  39. mlrun/api/main.py +3 -2
  40. mlrun/api/rundb/__init__.py +13 -0
  41. mlrun/{db → api/rundb}/sqldb.py +36 -84
  42. mlrun/api/runtime_handlers/__init__.py +56 -0
  43. mlrun/api/runtime_handlers/base.py +1247 -0
  44. mlrun/api/runtime_handlers/daskjob.py +209 -0
  45. mlrun/api/runtime_handlers/kubejob.py +37 -0
  46. mlrun/api/runtime_handlers/mpijob.py +147 -0
  47. mlrun/api/runtime_handlers/remotesparkjob.py +29 -0
  48. mlrun/api/runtime_handlers/sparkjob.py +148 -0
  49. mlrun/api/schemas/__init__.py +17 -6
  50. mlrun/api/utils/builder.py +1 -4
  51. mlrun/api/utils/clients/chief.py +14 -0
  52. mlrun/api/utils/clients/iguazio.py +33 -33
  53. mlrun/api/utils/clients/nuclio.py +2 -2
  54. mlrun/api/utils/periodic.py +9 -2
  55. mlrun/api/utils/projects/follower.py +14 -7
  56. mlrun/api/utils/projects/leader.py +2 -1
  57. mlrun/api/utils/projects/remotes/nop_follower.py +2 -2
  58. mlrun/api/utils/projects/remotes/nop_leader.py +2 -2
  59. mlrun/api/utils/runtimes/__init__.py +14 -0
  60. mlrun/api/utils/runtimes/nuclio.py +43 -0
  61. mlrun/api/utils/scheduler.py +98 -15
  62. mlrun/api/utils/singletons/db.py +5 -1
  63. mlrun/api/utils/singletons/project_member.py +4 -1
  64. mlrun/api/utils/singletons/scheduler.py +1 -1
  65. mlrun/artifacts/base.py +6 -6
  66. mlrun/artifacts/dataset.py +4 -4
  67. mlrun/artifacts/manager.py +2 -3
  68. mlrun/artifacts/model.py +2 -2
  69. mlrun/artifacts/plots.py +8 -8
  70. mlrun/common/db/__init__.py +14 -0
  71. mlrun/common/helpers.py +37 -0
  72. mlrun/{mlutils → common/model_monitoring}/__init__.py +3 -2
  73. mlrun/common/model_monitoring/helpers.py +69 -0
  74. mlrun/common/schemas/__init__.py +13 -1
  75. mlrun/common/schemas/auth.py +4 -1
  76. mlrun/common/schemas/client_spec.py +1 -1
  77. mlrun/common/schemas/function.py +17 -0
  78. mlrun/common/schemas/model_monitoring/__init__.py +48 -0
  79. mlrun/common/{model_monitoring.py → schemas/model_monitoring/constants.py} +11 -23
  80. mlrun/common/schemas/model_monitoring/grafana.py +55 -0
  81. mlrun/common/schemas/{model_endpoints.py → model_monitoring/model_endpoints.py} +32 -65
  82. mlrun/common/schemas/notification.py +1 -0
  83. mlrun/common/schemas/object.py +4 -0
  84. mlrun/common/schemas/project.py +1 -0
  85. mlrun/common/schemas/regex.py +1 -1
  86. mlrun/common/schemas/runs.py +1 -8
  87. mlrun/common/schemas/schedule.py +1 -8
  88. mlrun/common/schemas/workflow.py +54 -0
  89. mlrun/config.py +45 -42
  90. mlrun/datastore/__init__.py +21 -0
  91. mlrun/datastore/base.py +1 -1
  92. mlrun/datastore/datastore.py +9 -0
  93. mlrun/datastore/dbfs_store.py +168 -0
  94. mlrun/datastore/helpers.py +18 -0
  95. mlrun/datastore/sources.py +1 -0
  96. mlrun/datastore/store_resources.py +2 -5
  97. mlrun/datastore/v3io.py +1 -2
  98. mlrun/db/__init__.py +4 -68
  99. mlrun/db/base.py +12 -0
  100. mlrun/db/factory.py +65 -0
  101. mlrun/db/httpdb.py +175 -20
  102. mlrun/db/nopdb.py +4 -2
  103. mlrun/execution.py +4 -2
  104. mlrun/feature_store/__init__.py +1 -0
  105. mlrun/feature_store/api.py +1 -2
  106. mlrun/feature_store/common.py +2 -1
  107. mlrun/feature_store/feature_set.py +1 -11
  108. mlrun/feature_store/feature_vector.py +340 -2
  109. mlrun/feature_store/ingestion.py +5 -10
  110. mlrun/feature_store/retrieval/base.py +118 -104
  111. mlrun/feature_store/retrieval/dask_merger.py +17 -10
  112. mlrun/feature_store/retrieval/job.py +4 -1
  113. mlrun/feature_store/retrieval/local_merger.py +18 -18
  114. mlrun/feature_store/retrieval/spark_merger.py +21 -14
  115. mlrun/feature_store/retrieval/storey_merger.py +22 -16
  116. mlrun/kfpops.py +3 -9
  117. mlrun/launcher/base.py +57 -53
  118. mlrun/launcher/client.py +5 -4
  119. mlrun/launcher/factory.py +24 -13
  120. mlrun/launcher/local.py +6 -6
  121. mlrun/launcher/remote.py +4 -4
  122. mlrun/lists.py +0 -11
  123. mlrun/model.py +11 -17
  124. mlrun/model_monitoring/__init__.py +2 -22
  125. mlrun/model_monitoring/features_drift_table.py +1 -1
  126. mlrun/model_monitoring/helpers.py +22 -210
  127. mlrun/model_monitoring/model_endpoint.py +1 -1
  128. mlrun/model_monitoring/model_monitoring_batch.py +127 -50
  129. mlrun/model_monitoring/prometheus.py +219 -0
  130. mlrun/model_monitoring/stores/__init__.py +16 -11
  131. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +95 -23
  132. mlrun/model_monitoring/stores/models/mysql.py +47 -29
  133. mlrun/model_monitoring/stores/models/sqlite.py +47 -29
  134. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +31 -19
  135. mlrun/model_monitoring/{stream_processing_fs.py → stream_processing.py} +206 -64
  136. mlrun/model_monitoring/tracking_policy.py +104 -0
  137. mlrun/package/packager.py +6 -8
  138. mlrun/package/packagers/default_packager.py +121 -10
  139. mlrun/package/packagers/numpy_packagers.py +1 -1
  140. mlrun/platforms/__init__.py +0 -2
  141. mlrun/platforms/iguazio.py +0 -56
  142. mlrun/projects/pipelines.py +53 -159
  143. mlrun/projects/project.py +10 -37
  144. mlrun/render.py +1 -1
  145. mlrun/run.py +8 -124
  146. mlrun/runtimes/__init__.py +6 -42
  147. mlrun/runtimes/base.py +29 -1249
  148. mlrun/runtimes/daskjob.py +2 -198
  149. mlrun/runtimes/funcdoc.py +0 -9
  150. mlrun/runtimes/function.py +25 -29
  151. mlrun/runtimes/kubejob.py +5 -29
  152. mlrun/runtimes/local.py +1 -1
  153. mlrun/runtimes/mpijob/__init__.py +2 -2
  154. mlrun/runtimes/mpijob/abstract.py +10 -1
  155. mlrun/runtimes/mpijob/v1.py +0 -76
  156. mlrun/runtimes/mpijob/v1alpha1.py +1 -74
  157. mlrun/runtimes/nuclio.py +3 -2
  158. mlrun/runtimes/pod.py +28 -18
  159. mlrun/runtimes/remotesparkjob.py +1 -15
  160. mlrun/runtimes/serving.py +14 -6
  161. mlrun/runtimes/sparkjob/__init__.py +0 -1
  162. mlrun/runtimes/sparkjob/abstract.py +4 -131
  163. mlrun/runtimes/utils.py +0 -26
  164. mlrun/serving/routers.py +7 -7
  165. mlrun/serving/server.py +11 -8
  166. mlrun/serving/states.py +7 -1
  167. mlrun/serving/v2_serving.py +6 -6
  168. mlrun/utils/helpers.py +23 -42
  169. mlrun/utils/notifications/notification/__init__.py +4 -0
  170. mlrun/utils/notifications/notification/webhook.py +61 -0
  171. mlrun/utils/notifications/notification_pusher.py +5 -25
  172. mlrun/utils/regex.py +7 -2
  173. mlrun/utils/version/version.json +2 -2
  174. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/METADATA +26 -25
  175. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/RECORD +180 -158
  176. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/WHEEL +1 -1
  177. mlrun/mlutils/data.py +0 -160
  178. mlrun/mlutils/models.py +0 -78
  179. mlrun/mlutils/plots.py +0 -902
  180. mlrun/utils/model_monitoring.py +0 -249
  181. /mlrun/{api/db/sqldb/session.py → common/db/sql_session.py} +0 -0
  182. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/LICENSE +0 -0
  183. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/entry_points.txt +0 -0
  184. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/top_level.txt +0 -0
mlrun/feature_store/retrieval/spark_merger.py CHANGED
@@ -41,8 +41,9 @@ class SparkFeatureMerger(BaseMerger):
         self,
         entity_df,
         entity_timestamp_column: str,
-        featureset,
-        featureset_df,
+        featureset_name: str,
+        featureset_timstamp: str,
+        featureset_df: list,
         left_keys: list,
         right_keys: list,
     ):
@@ -54,13 +55,15 @@ class SparkFeatureMerger(BaseMerger):
                 the feature tables.
             entity_timestamp_column (str): Column name in entity_df which represents
                 event timestamp.
-            featureset_df (Dataframe): Spark dataframe representing the feature table.
+            featureset (Dataframe): Spark dataframe representing the feature table.
             featureset (FeatureSet): Feature set specification, which provides information on
                 how the join should be performed, such as the entity primary keys.
         Returns:
             DataFrame: Join result, which contains all the original columns from entity_df, as well
                 as all the features specified in featureset, where the feature columns will
                 be prefixed with featureset_df name.
+        :param featureset_name:
+        :param featureset_timstamp:
         """

         from pyspark.sql import Window
@@ -68,7 +71,7 @@ class SparkFeatureMerger(BaseMerger):

         entity_with_id = entity_df.withColumn("_row_nr", monotonically_increasing_id())
         rename_right_keys = {}
-        for key in right_keys + [featureset.spec.timestamp_key]:
+        for key in right_keys + [featureset_timstamp]:
             if key in entity_df.columns:
                 rename_right_keys[key] = f"ft__{key}"
         # get columns for projection
@@ -79,7 +82,7 @@ class SparkFeatureMerger(BaseMerger):

         aliased_featureset_df = featureset_df.select(projection)
         right_timestamp = rename_right_keys.get(
-            featureset.spec.timestamp_key, featureset.spec.timestamp_key
+            featureset_timstamp, featureset_timstamp
         )

         # set join conditions
@@ -106,7 +109,7 @@ class SparkFeatureMerger(BaseMerger):
             "_rank", row_number().over(window)
         ).filter(col("_rank") == 1)

-        for key in right_keys + [featureset.spec.timestamp_key]:
+        for key in right_keys + [featureset_timstamp]:
             if key in entity_df.columns + [entity_timestamp_column]:
                 filter_most_recent_feature_timestamp = (
                     filter_most_recent_feature_timestamp.drop(
@@ -121,7 +124,8 @@ class SparkFeatureMerger(BaseMerger):
         self,
         entity_df,
         entity_timestamp_column: str,
-        featureset,
+        featureset_name,
+        featureset_timestamp,
         featureset_df,
         left_keys: list,
         right_keys: list,
@@ -130,20 +134,18 @@ class SparkFeatureMerger(BaseMerger):
         """
         spark dataframes join

-        Args:
-            entity_df (DataFrame): Spark dataframe representing the entities, to be joined with
+        :param entity_df (DataFrame): Spark dataframe representing the entities, to be joined with
                 the feature tables.
-            entity_timestamp_column (str): Column name in entity_df which represents
+        :param entity_timestamp_column (str): Column name in entity_df which represents
                 event timestamp.
-            featureset_df (Dataframe): Spark dataframe representing the feature table.
-            featureset (FeatureSet): Feature set specification, which provide information on
-                how the join should be performed, such as the entity primary keys.
+        :param featureset_df (Dataframe): Spark dataframe representing the feature table.
+        :param featureset_name:
+        :param featureset_timestamp:

         Returns:
             DataFrame: Join result, which contains all the original columns from entity_df, as well
                 as all the features specified in featureset, where the feature columns will
                 be prefixed with featureset_df name.
-
         """
         if left_keys != right_keys:
             join_cond = [
@@ -270,3 +272,8 @@ class SparkFeatureMerger(BaseMerger):
         self._result_df = self._result_df.orderBy(
             *[col(col_name).asc_nulls_last() for col_name in order_by_active]
         )
+
+    def _convert_entity_rows_to_engine_df(self, entity_rows):
+        if entity_rows is not None and not hasattr(entity_rows, "rdd"):
+            return self.spark.createDataFrame(entity_rows)
+        return entity_rows
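The new `_convert_entity_rows_to_engine_df` hook normalizes whatever the caller passes as entity rows: an object without an `rdd` attribute (for example a pandas DataFrame) is converted with `spark.createDataFrame`, while a native Spark DataFrame passes through unchanged. A minimal sketch of that behavior, assuming `merger` is a `SparkFeatureMerger` whose `spark` attribute holds an active `SparkSession` (variable names here are illustrative):

    import pandas as pd

    entity_rows = pd.DataFrame({"customer_id": [1, 2]})

    # pandas DataFrames have no `rdd` attribute, so the hook converts them
    spark_df = merger._convert_entity_rows_to_engine_df(entity_rows)

    # Spark DataFrames do have `rdd`, so they are returned as-is
    assert merger._convert_entity_rows_to_engine_df(spark_df) is spark_df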
mlrun/feature_store/retrieval/storey_merger.py CHANGED
@@ -1,4 +1,4 @@
-# Copyright 2018 Iguazio
+# Copyright 2023 Iguazio
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -41,45 +41,45 @@ class StoreyFeatureMerger(BaseMerger):
         )
         next = graph

-        fs_link_list = self._create_linked_relation_list(
+        join_graph = self._get_graph(
             feature_set_objects, feature_set_fields, entity_keys
         )

         all_columns = []
         save_column = []
         entity_keys = []
+        del_columns = []
         end_aliases = {}
-        for node in fs_link_list:
-            name = node.name
-            if name == self._entity_rows_node_name:
-                continue
-            featureset = feature_set_objects[name]
+        for step in join_graph.steps:
+            name = step.right_feature_set_name
+            feature_set = feature_set_objects[name]
             columns = feature_set_fields[name]
             column_names = [name for name, alias in columns]
             aliases = {name: alias for name, alias in columns if alias}
             all_columns += [aliases.get(name, name) for name in column_names]
-            for col in node.data["save_cols"]:
+            saved_columns_for_relation = list(
+                self.vector.get_feature_set_relations(feature_set).keys()
+            )
+
+            for col in saved_columns_for_relation:
                 if col not in column_names:
                     column_names.append(col)
+                    del_columns.append(col)
                 else:
                     save_column.append(col)

-            entity_list = node.data["right_keys"] or list(
-                featureset.spec.entities.keys()
-            )
+            entity_list = step.right_keys or list(feature_set.spec.entities.keys())
             if not entity_keys:
                 # if entity_keys not provided by the user we will set it to be the entity of the first feature set
                 entity_keys = entity_list
             end_aliases.update(
                 {
                     k: v
-                    for k, v in zip(entity_list, node.data["left_keys"])
+                    for k, v in zip(entity_list, step.left_keys)
                     if k != v and v in save_column
                 }
             )
-            mapping = {
-                k: v for k, v in zip(node.data["left_keys"], entity_list) if k != v
-            }
+            mapping = {k: v for k, v in zip(step.left_keys, entity_list) if k != v}
             if mapping:
                 next = next.to(
                     "storey.Rename",
@@ -91,7 +91,7 @@ class StoreyFeatureMerger(BaseMerger):
                 "storey.QueryByKey",
                 f"query-{name}",
                 features=column_names,
-                table=featureset.uri,
+                table=feature_set.uri,
                 key_field=entity_list,
                 aliases=aliases,
                 fixed_window_type=fixed_window_type.to_qbk_fixed_window_type(),
@@ -103,6 +103,12 @@ class StoreyFeatureMerger(BaseMerger):
             "rename-entity-to-features",
             mapping=end_aliases,
         )
+        if del_columns:
+            next = next.to(
+                "storey.flow.DropColumns",
+                "drop-unnecessary-columns",
+                columns=del_columns,
+            )
         for name in start_states:
             next.set_next(name)
mlrun/kfpops.py CHANGED
@@ -26,7 +26,6 @@ import mlrun
 from mlrun.errors import err_to_str

 from .config import config
-from .db import get_or_set_dburl, get_run_db
 from .model import HyperParamOptions, RunSpec
 from .utils import (
     dict_to_yaml,
@@ -297,7 +296,7 @@ def mlrun_op(
     outputs = [] if outputs is None else outputs
     labels = {} if labels is None else labels

-    rundb = rundb or get_or_set_dburl()
+    rundb = rundb or mlrun.db.get_or_set_dburl()
     cmd = [
         "python",
         "-m",
@@ -732,7 +731,7 @@ def generate_kfp_dag_and_resolve_project(run, project=None):
     return dag, project, workflow["status"].get("message", "")


-def format_summary_from_kfp_run(kfp_run, project=None, session=None):
+def format_summary_from_kfp_run(kfp_run, project=None):
     override_project = project if project and project != "*" else None
     dag, project, message = generate_kfp_dag_and_resolve_project(
         kfp_run, override_project
@@ -740,12 +739,7 @@ def format_summary_from_kfp_run(kfp_run, project=None):
     run_id = get_in(kfp_run, "run.id")

     # enrich DAG with mlrun run info
-    if session:
-        runs = mlrun.api.utils.singletons.db.get_db().list_runs(
-            session, project=project, labels=f"workflow={run_id}"
-        )
-    else:
-        runs = get_run_db().list_runs(project=project, labels=f"workflow={run_id}")
+    runs = mlrun.db.get_run_db().list_runs(project=project, labels=f"workflow={run_id}")

     for run in runs:
         step = get_in(run, ["metadata", "labels", "mlrun/runner-pod"])
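With the `session` parameter gone, `format_summary_from_kfp_run` always enriches the DAG through the client-side run DB; the server no longer threads a SQL session through this helper. The standalone equivalent of the new lookup, where the project name and run id are placeholders:

    import mlrun.db

    run_id = "<kfp-run-id>"  # placeholder
    runs = mlrun.db.get_run_db().list_runs(
        project="my-project", labels=f"workflow={run_id}"
    )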
mlrun/launcher/base.py CHANGED
@@ -25,6 +25,7 @@ import mlrun.kfpops
 import mlrun.lists
 import mlrun.model
 import mlrun.runtimes
+import mlrun.utils.regex
 from mlrun.utils import logger

 run_modes = ["pass"]
@@ -38,6 +39,56 @@ class BaseLauncher(abc.ABC):
     Each context will have its own implementation of the abstract methods while the common logic resides in this class
     """

+    def __init__(self, **kwargs):
+        pass
+
+    @abc.abstractmethod
+    def launch(
+        self,
+        runtime: "mlrun.runtimes.BaseRuntime",
+        task: Optional[
+            Union["mlrun.run.RunTemplate", "mlrun.run.RunObject", dict]
+        ] = None,
+        handler: Optional[Union[str, Callable]] = None,
+        name: Optional[str] = "",
+        project: Optional[str] = "",
+        params: Optional[dict] = None,
+        inputs: Optional[Dict[str, str]] = None,
+        out_path: Optional[str] = "",
+        workdir: Optional[str] = "",
+        artifact_path: Optional[str] = "",
+        watch: Optional[bool] = True,
+        schedule: Optional[
+            Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
+        ] = None,
+        hyperparams: Dict[str, list] = None,
+        hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
+        verbose: Optional[bool] = None,
+        scrape_metrics: Optional[bool] = None,
+        local_code_path: Optional[str] = None,
+        auto_build: Optional[bool] = None,
+        param_file_secrets: Optional[Dict[str, str]] = None,
+        notifications: Optional[List[mlrun.model.Notification]] = None,
+        returns: Optional[List[Union[str, Dict[str, str]]]] = None,
+    ) -> "mlrun.run.RunObject":
+        """run the function from the server/client[local/remote]"""
+        pass
+
+    @abc.abstractmethod
+    def enrich_runtime(
+        self,
+        runtime: "mlrun.runtimes.base.BaseRuntime",
+        project_name: Optional[str] = "",
+    ):
+        pass
+
+    @staticmethod
+    @abc.abstractmethod
+    def _store_function(
+        runtime: "mlrun.runtimes.BaseRuntime", run: "mlrun.run.RunObject"
+    ):
+        pass
+
     def save_function(
         self,
         runtime: "mlrun.runtimes.BaseRuntime",
@@ -73,36 +124,9 @@ class BaseLauncher(abc.ABC):
         hash_key = hash_key if versioned else None
         return "db://" + runtime._function_uri(hash_key=hash_key, tag=tag)

-    @abc.abstractmethod
-    def launch(
-        self,
-        runtime: "mlrun.runtimes.BaseRuntime",
-        task: Optional[
-            Union["mlrun.run.RunTemplate", "mlrun.run.RunObject", dict]
-        ] = None,
-        handler: Optional[Union[str, Callable]] = None,
-        name: Optional[str] = "",
-        project: Optional[str] = "",
-        params: Optional[dict] = None,
-        inputs: Optional[Dict[str, str]] = None,
-        out_path: Optional[str] = "",
-        workdir: Optional[str] = "",
-        artifact_path: Optional[str] = "",
-        watch: Optional[bool] = True,
-        schedule: Optional[
-            Union[str, mlrun.common.schemas.schedule.ScheduleCronTrigger]
-        ] = None,
-        hyperparams: Dict[str, list] = None,
-        hyper_param_options: Optional[mlrun.model.HyperParamOptions] = None,
-        verbose: Optional[bool] = None,
-        scrape_metrics: Optional[bool] = None,
-        local_code_path: Optional[str] = None,
-        auto_build: Optional[bool] = None,
-        param_file_secrets: Optional[Dict[str, str]] = None,
-        notifications: Optional[List[mlrun.model.Notification]] = None,
-        returns: Optional[List[Union[str, Dict[str, str]]]] = None,
-    ) -> "mlrun.run.RunObject":
-        """run the function from the server/client[local/remote]"""
+    @staticmethod
+    def prepare_image_for_deploy(runtime: "mlrun.runtimes.BaseRuntime"):
+        """Check if the runtime requires to build the image and updates the spec accordingly"""
         pass

     def _validate_runtime(
@@ -190,8 +214,8 @@ class BaseLauncher(abc.ABC):
             # task is already a RunObject
             return task

+    @staticmethod
     def _enrich_run(
-        self,
         runtime,
         run,
         handler=None,
@@ -361,8 +385,8 @@ class BaseLauncher(abc.ABC):
                 name=run.metadata.name,
             )
             if run.status.state in [
-                mlrun.runtimes.base.RunStates.error,
-                mlrun.runtimes.base.RunStates.aborted,
+                mlrun.runtimes.constants.RunStates.error,
+                mlrun.runtimes.constants.RunStates.aborted,
             ]:
                 if runtime._is_remote and not runtime.is_child:
                     logger.error(
@@ -379,26 +403,6 @@ class BaseLauncher(abc.ABC):
     def _refresh_function_metadata(runtime: "mlrun.runtimes.BaseRuntime"):
         pass

-    @staticmethod
-    def prepare_image_for_deploy(runtime: "mlrun.runtimes.BaseRuntime"):
-        """Check if the runtime requires to build the image and updates the spec accordingly"""
-        pass
-
-    @staticmethod
-    @abc.abstractmethod
-    def enrich_runtime(
-        runtime: "mlrun.runtimes.base.BaseRuntime",
-        project_name: Optional[str] = "",
-    ):
-        pass
-
-    @staticmethod
-    @abc.abstractmethod
-    def _store_function(
-        runtime: "mlrun.runtimes.BaseRuntime", run: "mlrun.run.RunObject"
-    ):
-        pass
-
     @staticmethod
     def _log_track_results(
         runtime: "mlrun.runtimes.BaseRuntime", result: dict, run: "mlrun.run.RunObject"
mlrun/launcher/client.py CHANGED
@@ -19,21 +19,22 @@ from typing import Optional
 import IPython

 import mlrun.errors
-import mlrun.launcher.base
+import mlrun.launcher.base as launcher
 import mlrun.lists
 import mlrun.model
 import mlrun.runtimes
 from mlrun.utils import logger


-class ClientBaseLauncher(mlrun.launcher.base.BaseLauncher, abc.ABC):
+class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
     """
     Abstract class for common code between client launchers
     """

-    @staticmethod
     def enrich_runtime(
-        runtime: "mlrun.runtimes.base.BaseRuntime", project_name: Optional[str] = ""
+        self,
+        runtime: "mlrun.runtimes.base.BaseRuntime",
+        project_name: Optional[str] = "",
     ):
         runtime.try_auto_mount_based_on_config()
         runtime._fill_credentials()
mlrun/launcher/factory.py CHANGED
@@ -11,17 +11,24 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from dependency_injector import containers, providers
+
 import mlrun.config
 import mlrun.errors
 import mlrun.launcher.base
 import mlrun.launcher.local
 import mlrun.launcher.remote
+import mlrun.utils.singleton
+

+class LauncherFactory(
+    metaclass=mlrun.utils.singleton.AbstractSingleton,
+):
+    def __init__(self):
+        self._launcher_container = LauncherContainer()

-class LauncherFactory(object):
-    @staticmethod
     def create_launcher(
-        is_remote: bool, local: bool = False
+        self, is_remote: bool, **kwargs
     ) -> mlrun.launcher.base.BaseLauncher:
         """
         Creates the appropriate launcher for the specified run.
@@ -30,21 +37,25 @@ class LauncherFactory(object):
         ClientLocalLauncher - if the run is not remote or local was specified.

         :param is_remote: Whether the runtime requires remote execution.
-        :param local: Run the function locally vs on the Runtime/Cluster

         :return: The appropriate launcher for the specified run.
         """
         if mlrun.config.is_running_as_api():
-            if local:
-                raise mlrun.errors.MLRunInternalServerError(
-                    "Launch of local run inside the server is not allowed"
-                )
+            return self._launcher_container.server_side_launcher(**kwargs)

-            from mlrun.api.launcher import ServerSideLauncher
+        local = kwargs.get("local", False)
+        if is_remote and not local:
+            return self._launcher_container.client_remote_launcher(**kwargs)

-            return ServerSideLauncher()
+        return self._launcher_container.client_local_launcher(**kwargs)

-        if is_remote and not local:
-            return mlrun.launcher.remote.ClientRemoteLauncher()

-        return mlrun.launcher.local.ClientLocalLauncher(local)
+class LauncherContainer(containers.DeclarativeContainer):
+    client_remote_launcher = providers.Factory(
+        mlrun.launcher.remote.ClientRemoteLauncher
+    )
+    client_local_launcher = providers.Factory(mlrun.launcher.local.ClientLocalLauncher)
+
+    # Provider for injection of a server side launcher.
+    # This allows us to override the launcher from external packages without having to import them.
+    server_side_launcher = providers.Factory(mlrun.launcher.base.BaseLauncher)
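The factory now builds launchers through a dependency_injector `DeclarativeContainer`, and `LauncherFactory` itself becomes a singleton via the `AbstractSingleton` metaclass. The payoff is the `server_side_launcher` provider: it defaults to the abstract `BaseLauncher`, and server code can swap in its own implementation without the client package importing server modules. A hedged sketch of such an override using the standard dependency_injector `override()` API — the `ServerSideLauncher` import path follows the removed code above, and where mlrun actually performs this wiring is not shown in this diff:

    from dependency_injector import providers

    from mlrun.api.launcher import ServerSideLauncher
    from mlrun.launcher.factory import LauncherFactory

    # AbstractSingleton: repeated constructions return the same instance
    factory = LauncherFactory()

    # reaching into the private container here purely for illustration
    factory._launcher_container.server_side_launcher.override(
        providers.Factory(ServerSideLauncher)
    )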
mlrun/launcher/local.py CHANGED
@@ -17,7 +17,7 @@ from typing import Callable, Dict, List, Optional, Union

 import mlrun.common.schemas.schedule
 import mlrun.errors
-import mlrun.launcher.client
+import mlrun.launcher.client as launcher
 import mlrun.run
 import mlrun.runtimes.generators
 import mlrun.utils.clones
@@ -25,19 +25,19 @@ import mlrun.utils.notifications
 from mlrun.utils import logger


-class ClientLocalLauncher(mlrun.launcher.client.ClientBaseLauncher):
+class ClientLocalLauncher(launcher.ClientBaseLauncher):
     """
     ClientLocalLauncher is a launcher that runs the job locally.
     Either on the user's machine (_is_run_local is True) or on a remote machine (_is_run_local is False).
     """

-    def __init__(self, local: bool):
+    def __init__(self, local: bool = False, **kwargs):
         """
         Initialize a ClientLocalLauncher.
         :param local: True if the job runs on the user's local machine,
                       False if it runs on a remote machine (e.g. a dedicated k8s pod).
         """
-        super().__init__()
+        super().__init__(**kwargs)
         self._is_run_local = local

     def launch(
@@ -119,14 +119,14 @@ class ClientLocalLauncher(mlrun.launcher.client.ClientBaseLauncher):
             notifications=notifications,
         )
         self._validate_runtime(runtime, run)
-        result = self.execute(
+        result = self._execute(
             runtime=runtime,
             run=run,
         )

         return result

-    def execute(
+    def _execute(
         self,
         runtime: "mlrun.runtimes.BaseRuntime",
         run: Optional[Union["mlrun.run.RunTemplate", "mlrun.run.RunObject"]] = None,
mlrun/launcher/remote.py CHANGED
@@ -19,7 +19,7 @@ import requests
 import mlrun.common.schemas.schedule
 import mlrun.db
 import mlrun.errors
-import mlrun.launcher.client
+import mlrun.launcher.client as launcher
 import mlrun.run
 import mlrun.runtimes
 import mlrun.runtimes.generators
@@ -28,7 +28,7 @@ import mlrun.utils.notifications
 from mlrun.utils import logger


-class ClientRemoteLauncher(mlrun.launcher.client.ClientBaseLauncher):
+class ClientRemoteLauncher(launcher.ClientBaseLauncher):
     def launch(
         self,
         runtime: "mlrun.runtimes.KubejobRuntime",
@@ -106,9 +106,9 @@ class ClientRemoteLauncher(mlrun.launcher.client.ClientBaseLauncher):
         )
         self._store_function(runtime, run)

-        return self.submit_job(runtime, run, schedule, watch)
+        return self._submit_job(runtime, run, schedule, watch)

-    def submit_job(
+    def _submit_job(
         self,
         runtime: "mlrun.runtimes.KubejobRuntime",
         run: "mlrun.run.RunObject",
mlrun/lists.py CHANGED
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-import warnings
 from copy import copy
 from typing import List

@@ -220,16 +219,6 @@ class ArtifactList(list):
         """return as a list of artifact objects"""
         return [dict_to_artifact(artifact) for artifact in self]

-    def objects(self) -> List[Artifact]:
-        """return as a list of artifact objects"""
-        warnings.warn(
-            "'objects' is deprecated in 1.3.0 and will be removed in 1.5.0. "
-            "Use 'to_objects' instead.",
-            # TODO: remove in 1.5.0
-            FutureWarning,
-        )
-        return [dict_to_artifact(artifact) for artifact in self]
-
     def dataitems(self) -> List["mlrun.DataItem"]:
         """return as a list of DataItem objects"""
         dataitems = []
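The deprecated `ArtifactList.objects()` alias is removed on schedule (its own warning promised removal in 1.5.0); `to_objects()` is the surviving API, and migration is mechanical (`project` here is illustrative):

    artifacts = project.list_artifacts()
    objs = artifacts.to_objects()  # previously: artifacts.objects()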
mlrun/model.py CHANGED
@@ -137,6 +137,8 @@ class ModelObj:

 # model class for building ModelObj dictionaries
 class ObjectDict:
+    kind = "object_dict"
+
     def __init__(self, classes_map, default_kind=""):
         self._children = OrderedDict()
         self._default_kind = default_kind
@@ -903,7 +905,7 @@ class RunSpec(ModelObj):

     def extract_type_hints_from_inputs(self):
         """
-        This method extracts the type hints from the inputs keys in the input dictionary.
+        This method extracts the type hints from the input keys in the input dictionary.

         As a result, after the method ran the inputs dictionary - a dictionary of parameter names as keys and paths as
         values, will be cleared from type hints and the extracted type hints will be saved in the spec's inputs type
@@ -986,7 +988,7 @@ class RunSpec(ModelObj):
             # Validate correct pattern:
             if input_key.count(":") > 1:
                 raise mlrun.errors.MLRunInvalidArgumentError(
-                    f"Incorrect input pattern. Inputs keys can have only a single ':' in them to specify the desired type "
+                    f"Incorrect input pattern. Input keys can have only a single ':' in them to specify the desired type "
                     f"the input will be parsed as. Given: {input_key}."
                 )

@@ -1309,25 +1311,17 @@ class RunObject(RunTemplate):
         """return or watch on the run logs"""
         if not db:
             db = mlrun.get_run_db()
+
         if not db:
-            print("DB is not configured, cannot show logs")
+            logger.warning("DB is not configured, cannot show logs")
             return None

-        new_offset = 0
-        if db.kind == "http":
-            state, new_offset = db.watch_log(
-                self.metadata.uid, self.metadata.project, watch=watch, offset=offset
-            )
-        # not expected to reach this else, as FileDB is not supported any more and because we don't watch logs on API
-        else:
-            state, text = db.get_log(
-                self.metadata.uid, self.metadata.project, offset=offset
-            )
-            if text:
-                print(text.decode())
-
+        state, new_offset = db.watch_log(
+            self.metadata.uid, self.metadata.project, watch=watch, offset=offset
+        )
         if state:
-            print(f"final state: {state}")
+            logger.debug("Run reached terminal state", state=state)
+
         return state, new_offset

     def wait_for_completion(
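With the FileDB branch removed, `RunObject.logs()` always streams through `db.watch_log` and reports the terminal state via the logger rather than print(); callers still get the state and new offset back, e.g. (assuming `run` is a `RunObject` from a previous execution):

    state, new_offset = run.logs(watch=False)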
mlrun/model_monitoring/__init__.py CHANGED
@@ -15,27 +15,7 @@
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
 # for backwards compatibility

-__all__ = [
-    "ModelEndpoint",
-    "EventFieldType",
-    "EventLiveStats",
-    "EventKeyMetrics",
-    "TimeSeriesTarget",
-    "ModelEndpointTarget",
-    "FileTargetKind",
-    "ProjectSecretKeys",
-    "ModelMonitoringStoreKinds",
-]
-
-from mlrun.common.model_monitoring import (
-    EventFieldType,
-    EventKeyMetrics,
-    EventLiveStats,
-    FileTargetKind,
-    ModelEndpointTarget,
-    ModelMonitoringStoreKinds,
-    ProjectSecretKeys,
-    TimeSeriesTarget,
-)

+from .helpers import get_stream_path
 from .model_endpoint import ModelEndpoint
+from .stores import ModelEndpointStore, ModelEndpointStoreType, get_model_endpoint_store
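The backwards-compatibility re-exports (`EventFieldType`, `ProjectSecretKeys`, etc.) are dropped from `mlrun.model_monitoring`. Per entry 79 in the file list, those constants moved to `mlrun/common/schemas/model_monitoring/constants.py`, so the likely migration (assuming the constant names were kept) is:

    # 1.4.x:
    # from mlrun.model_monitoring import EventFieldType, ProjectSecretKeys
    # 1.5.x:
    from mlrun.common.schemas.model_monitoring import EventFieldType, ProjectSecretKeys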
mlrun/model_monitoring/features_drift_table.py CHANGED
@@ -18,7 +18,7 @@ import numpy as np
 import plotly.graph_objects as go
 from plotly.subplots import make_subplots

-from .model_monitoring_batch import DriftResultType, DriftStatus
+from mlrun.model_monitoring.model_monitoring_batch import DriftResultType, DriftStatus


 class FeaturesDriftTablePlot: