mlrun 1.10.0rc6__py3-none-any.whl → 1.10.0rc8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; see the registry's advisory page for more details.

Files changed (52)
  1. mlrun/__init__.py +3 -1
  2. mlrun/__main__.py +47 -4
  3. mlrun/artifacts/base.py +0 -27
  4. mlrun/artifacts/dataset.py +0 -8
  5. mlrun/artifacts/model.py +0 -7
  6. mlrun/artifacts/plots.py +0 -13
  7. mlrun/common/schemas/background_task.py +5 -0
  8. mlrun/common/schemas/model_monitoring/__init__.py +2 -0
  9. mlrun/common/schemas/model_monitoring/constants.py +16 -0
  10. mlrun/common/schemas/project.py +4 -0
  11. mlrun/common/schemas/serving.py +2 -0
  12. mlrun/config.py +11 -22
  13. mlrun/datastore/utils.py +3 -1
  14. mlrun/db/base.py +0 -19
  15. mlrun/db/httpdb.py +73 -65
  16. mlrun/db/nopdb.py +0 -12
  17. mlrun/frameworks/tf_keras/__init__.py +4 -4
  18. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +23 -20
  19. mlrun/frameworks/tf_keras/model_handler.py +69 -9
  20. mlrun/frameworks/tf_keras/utils.py +12 -1
  21. mlrun/launcher/base.py +7 -0
  22. mlrun/launcher/client.py +2 -21
  23. mlrun/launcher/local.py +4 -0
  24. mlrun/model_monitoring/applications/_application_steps.py +23 -39
  25. mlrun/model_monitoring/applications/base.py +167 -32
  26. mlrun/model_monitoring/helpers.py +0 -3
  27. mlrun/projects/operations.py +11 -24
  28. mlrun/projects/pipelines.py +33 -3
  29. mlrun/projects/project.py +45 -89
  30. mlrun/run.py +37 -5
  31. mlrun/runtimes/daskjob.py +2 -0
  32. mlrun/runtimes/kubejob.py +5 -8
  33. mlrun/runtimes/mpijob/abstract.py +2 -0
  34. mlrun/runtimes/mpijob/v1.py +2 -0
  35. mlrun/runtimes/nuclio/function.py +2 -0
  36. mlrun/runtimes/nuclio/serving.py +60 -5
  37. mlrun/runtimes/pod.py +3 -0
  38. mlrun/runtimes/remotesparkjob.py +2 -0
  39. mlrun/runtimes/sparkjob/spark3job.py +2 -0
  40. mlrun/serving/__init__.py +2 -0
  41. mlrun/serving/server.py +253 -29
  42. mlrun/serving/states.py +215 -18
  43. mlrun/serving/system_steps.py +391 -0
  44. mlrun/serving/v2_serving.py +9 -8
  45. mlrun/utils/helpers.py +18 -4
  46. mlrun/utils/version/version.json +2 -2
  47. {mlrun-1.10.0rc6.dist-info → mlrun-1.10.0rc8.dist-info}/METADATA +9 -9
  48. {mlrun-1.10.0rc6.dist-info → mlrun-1.10.0rc8.dist-info}/RECORD +52 -51
  49. {mlrun-1.10.0rc6.dist-info → mlrun-1.10.0rc8.dist-info}/WHEEL +0 -0
  50. {mlrun-1.10.0rc6.dist-info → mlrun-1.10.0rc8.dist-info}/entry_points.txt +0 -0
  51. {mlrun-1.10.0rc6.dist-info → mlrun-1.10.0rc8.dist-info}/licenses/LICENSE +0 -0
  52. {mlrun-1.10.0rc6.dist-info → mlrun-1.10.0rc8.dist-info}/top_level.txt +0 -0
mlrun/__init__.py CHANGED
@@ -61,6 +61,7 @@ from .run import (
61
61
  import_function,
62
62
  new_function,
63
63
  retry_pipeline,
64
+ terminate_pipeline,
64
65
  wait_for_pipeline_completion,
65
66
  )
66
67
  from .runtimes import mounts, new_model_server
@@ -217,5 +218,6 @@ def set_env_from_file(env_file: str, return_dict: bool = False) -> Optional[dict
217
218
  for key, value in env_vars.items():
218
219
  environ[key] = value
219
220
 
220
- mlconf.reload() # reload mlrun configuration
221
+ # reload mlrun configuration
222
+ mlconf.reload()
221
223
  return env_vars if return_dict else None
mlrun/__main__.py CHANGED
@@ -23,6 +23,7 @@ from ast import literal_eval
23
23
  from base64 import b64decode
24
24
  from os import environ, path, remove
25
25
  from pprint import pprint
26
+ from typing import Optional
26
27
 
27
28
  import click
28
29
  import dotenv
@@ -199,6 +200,13 @@ def main():
199
200
  multiple=True,
200
201
  help="Logging configurations for the handler's returning values",
201
202
  )
203
+ @click.option(
204
+ "--allow-cross-project",
205
+ is_flag=True,
206
+ default=True, # TODO: remove this default in 1.11
207
+ help="Override the loaded project name. This flag ensures awareness of loading an existing project yaml "
208
+ "as a baseline for a new project with a different name",
209
+ )
202
210
  def run(
203
211
  url,
204
212
  param,
@@ -242,6 +250,7 @@ def run(
242
250
  run_args,
243
251
  ensure_project,
244
252
  returns,
253
+ allow_cross_project,
245
254
  ):
246
255
  """Execute a task and inject parameters."""
247
256
 
@@ -293,10 +302,11 @@ def run(
293
302
  mlrun.get_or_create_project(
294
303
  name=project,
295
304
  context="./",
305
+ allow_cross_project=allow_cross_project,
296
306
  )
297
307
  if func_url or kind:
298
308
  if func_url:
299
- runtime = func_url_to_runtime(func_url, ensure_project)
309
+ runtime = func_url_to_runtime(func_url, ensure_project, allow_cross_project)
300
310
  kind = get_in(runtime, "kind", kind or "job")
301
311
  if runtime is None:
302
312
  exit(1)
@@ -494,6 +504,13 @@ def run(
494
504
  default="/tmp/fullimage",
495
505
  help="path to file with full image data",
496
506
  )
507
+ @click.option(
508
+ "--allow-cross-project",
509
+ is_flag=True,
510
+ default=True, # TODO: remove this default in 1.11
511
+ help="Override the loaded project name. This flag ensures awareness of loading an existing project yaml "
512
+ "as a baseline for a new project with a different name",
513
+ )
497
514
  def build(
498
515
  func_url,
499
516
  name,
@@ -516,6 +533,7 @@ def build(
516
533
  state_file_path,
517
534
  image_file_path,
518
535
  full_image_file_path,
536
+ allow_cross_project,
519
537
  ):
520
538
  """Build a container image from code and requirements."""
521
539
 
@@ -591,6 +609,7 @@ def build(
591
609
  mlrun.get_or_create_project(
592
610
  name=project,
593
611
  context="./",
612
+ allow_cross_project=allow_cross_project,
594
613
  )
595
614
 
596
615
  if hasattr(func, "deploy"):
@@ -644,6 +663,13 @@ def build(
644
663
  is_flag=True,
645
664
  help="ensure the project exists, if not, create project",
646
665
  )
666
+ @click.option(
667
+ "--allow-cross-project",
668
+ is_flag=True,
669
+ default=True, # TODO: remove this default in 1.11
670
+ help="Override the loaded project name. This flag ensures awareness of loading an existing project yaml "
671
+ "as a baseline for a new project with a different name",
672
+ )
647
673
  def deploy(
648
674
  spec,
649
675
  source,
@@ -656,6 +682,7 @@ def deploy(
656
682
  verbose,
657
683
  env_file,
658
684
  ensure_project,
685
+ allow_cross_project,
659
686
  ):
660
687
  """Deploy model or function"""
661
688
  if env_file:
@@ -665,10 +692,11 @@ def deploy(
665
692
  mlrun.get_or_create_project(
666
693
  name=project,
667
694
  context="./",
695
+ allow_cross_project=allow_cross_project,
668
696
  )
669
697
 
670
698
  if func_url:
671
- runtime = func_url_to_runtime(func_url, ensure_project)
699
+ runtime = func_url_to_runtime(func_url, ensure_project, allow_cross_project)
672
700
  if runtime is None:
673
701
  exit(1)
674
702
  elif spec:
@@ -971,6 +999,13 @@ def logs(uid, project, offset, db):
971
999
  "destination define: file=notification.json or a "
972
1000
  'dictionary configuration e.g \'{"slack":{"webhook":"<webhook>"}}\'',
973
1001
  )
1002
+ @click.option(
1003
+ "--allow-cross-project",
1004
+ is_flag=True,
1005
+ default=True, # TODO: remove this default in 1.11
1006
+ help="Override the loaded project name. This flag ensures awareness of loading an existing project yaml "
1007
+ "as a baseline for a new project with a different name",
1008
+ )
974
1009
  def project(
975
1010
  context,
976
1011
  name,
@@ -998,6 +1033,7 @@ def project(
998
1033
  notifications,
999
1034
  save_secrets,
1000
1035
  save,
1036
+ allow_cross_project,
1001
1037
  ):
1002
1038
  """load and/or run a project"""
1003
1039
  if env_file:
@@ -1024,6 +1060,7 @@ def project(
1024
1060
  clone=clone,
1025
1061
  save=save,
1026
1062
  parameters=parameters,
1063
+ allow_cross_project=allow_cross_project,
1027
1064
  )
1028
1065
  url_str = " from " + url if url else ""
1029
1066
  print(f"Loading project {proj.name}{url_str} into {context}:\n")
@@ -1337,7 +1374,11 @@ def dict_to_str(struct: dict):
1337
1374
  return ",".join([f"{k}={v}" for k, v in struct.items()])
1338
1375
 
1339
1376
 
1340
- def func_url_to_runtime(func_url, ensure_project: bool = False):
1377
+ def func_url_to_runtime(
1378
+ func_url,
1379
+ ensure_project: bool = False,
1380
+ allow_cross_project: Optional[bool] = None,
1381
+ ):
1341
1382
  try:
1342
1383
  if func_url.startswith("db://"):
1343
1384
  func_url = func_url[5:]
@@ -1348,7 +1389,9 @@ def func_url_to_runtime(func_url, ensure_project: bool = False):
1348
1389
  func_url = "function.yaml" if func_url == "." else func_url
1349
1390
  runtime = import_function_to_dict(func_url, {})
1350
1391
  else:
1351
- mlrun_project = load_project(".", save=ensure_project)
1392
+ mlrun_project = load_project(
1393
+ ".", save=ensure_project, allow_cross_project=allow_cross_project
1394
+ )
1352
1395
  function = mlrun_project.get_function(func_url, enrich=True)
1353
1396
  if function.kind == "local":
1354
1397
  command, function = load_func_code(function)
mlrun/artifacts/base.py CHANGED
@@ -223,28 +223,9 @@ class Artifact(ModelObj):
223
223
  target_path=None,
224
224
  project=None,
225
225
  src_path: typing.Optional[str] = None,
226
- # All params up until here are legacy params for compatibility with legacy artifacts.
227
- # TODO: remove them in 1.10.0.
228
226
  metadata: ArtifactMetadata = None,
229
227
  spec: ArtifactSpec = None,
230
228
  ):
231
- if (
232
- key
233
- or body
234
- or viewer
235
- or is_inline
236
- or format
237
- or size
238
- or target_path
239
- or project
240
- or src_path
241
- ):
242
- warnings.warn(
243
- "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
244
- "Use the metadata and spec parameters instead.",
245
- DeprecationWarning,
246
- )
247
-
248
229
  self._metadata = None
249
230
  self.metadata = metadata
250
231
  self._spec = None
@@ -769,17 +750,9 @@ class LinkArtifact(Artifact):
769
750
  link_key=None,
770
751
  link_tree=None,
771
752
  project=None,
772
- # All params up until here are legacy params for compatibility with legacy artifacts.
773
- # TODO: remove them in 1.10.0.
774
753
  metadata: ArtifactMetadata = None,
775
754
  spec: LinkArtifactSpec = None,
776
755
  ):
777
- if key or target_path or link_iteration or link_key or link_tree or project:
778
- warnings.warn(
779
- "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
780
- "Use the metadata and spec parameters instead.",
781
- DeprecationWarning,
782
- )
783
756
  super().__init__(
784
757
  key, target_path=target_path, project=project, metadata=metadata, spec=spec
785
758
  )
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
  import os
15
15
  import pathlib
16
- import warnings
17
16
  from io import StringIO
18
17
  from typing import Optional
19
18
 
@@ -161,13 +160,6 @@ class DatasetArtifact(Artifact):
161
160
  label_column: Optional[str] = None,
162
161
  **kwargs,
163
162
  ):
164
- if key or format or target_path:
165
- warnings.warn(
166
- "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
167
- "Use the metadata and spec parameters instead.",
168
- DeprecationWarning,
169
- )
170
-
171
163
  format = (format or "").lower()
172
164
  super().__init__(key, None, format=format, target_path=target_path)
173
165
  if format and format not in self.SUPPORTED_FORMATS:
mlrun/artifacts/model.py CHANGED
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
 
15
15
  import tempfile
16
- import warnings
17
16
  from os import path
18
17
  from typing import Any, Optional, Union
19
18
 
@@ -189,12 +188,6 @@ class ModelArtifact(Artifact):
189
188
  Saved as a sub-dictionary under the parameter.
190
189
  :param kwargs: Arguments to pass to the artifact class.
191
190
  """
192
- if key or body or format or target_path:
193
- warnings.warn(
194
- "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
195
- "Use the metadata and spec parameters instead.",
196
- DeprecationWarning,
197
- )
198
191
  super().__init__(key, body, format=format, target_path=target_path, **kwargs)
199
192
  model_file = str(model_file or "")
200
193
  if model_file and model_url:
mlrun/artifacts/plots.py CHANGED
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
  import base64
15
15
  import typing
16
- import warnings
17
16
  from io import BytesIO
18
17
 
19
18
  import mlrun
@@ -35,12 +34,6 @@ class PlotArtifact(Artifact):
35
34
  def __init__(
36
35
  self, key=None, body=None, is_inline=False, target_path=None, title=None
37
36
  ):
38
- if key or body or is_inline or target_path:
39
- warnings.warn(
40
- "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
41
- "Use the metadata and spec parameters instead.",
42
- DeprecationWarning,
43
- )
44
37
  super().__init__(key, body, format="html", target_path=target_path)
45
38
  self.metadata.description = title
46
39
 
@@ -94,12 +87,6 @@ class PlotlyArtifact(Artifact):
94
87
  :param key: Key for the artifact to be stored in the database.
95
88
  :param target_path: Path to save the artifact.
96
89
  """
97
- if key or target_path:
98
- warnings.warn(
99
- "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
100
- "Use the metadata and spec parameters instead.",
101
- DeprecationWarning,
102
- )
103
90
  # Validate the plotly package:
104
91
  try:
105
92
  from plotly.graph_objs import Figure
@@ -22,6 +22,10 @@ import mlrun.common.types
22
22
  from .object import ObjectKind
23
23
 
24
24
 
25
+ class BackGroundTaskLabel(mlrun.common.types.StrEnum):
26
+ pipeline = "pipeline"
27
+
28
+
25
29
  class BackgroundTaskState(mlrun.common.types.StrEnum):
26
30
  succeeded = "succeeded"
27
31
  failed = "failed"
@@ -37,6 +41,7 @@ class BackgroundTaskState(mlrun.common.types.StrEnum):
37
41
 
38
42
  class BackgroundTaskMetadata(pydantic.v1.BaseModel):
39
43
  name: str
44
+ id: typing.Optional[int]
40
45
  kind: typing.Optional[str]
41
46
  project: typing.Optional[str]
42
47
  created: typing.Optional[datetime.datetime]
@@ -28,6 +28,7 @@ from .constants import (
28
28
  ModelEndpointCreationStrategy,
29
29
  ModelEndpointMonitoringMetricType,
30
30
  ModelEndpointSchema,
31
+ ModelMonitoringAppLabel,
31
32
  ModelMonitoringMode,
32
33
  MonitoringFunctionNames,
33
34
  PredictionsQueryConstants,
@@ -36,6 +37,7 @@ from .constants import (
36
37
  ResultKindApp,
37
38
  ResultStatusApp,
38
39
  SpecialApps,
40
+ StreamProcessingEvent,
39
41
  TDEngineSuperTables,
40
42
  TSDBTarget,
41
43
  V3IOTSDBTables,
@@ -142,6 +142,22 @@ class EventFieldType:
142
142
  EFFECTIVE_SAMPLE_COUNT = "effective_sample_count"
143
143
 
144
144
 
145
+ class StreamProcessingEvent:
146
+ MODEL = "model"
147
+ MODEL_CLASS = "model_class"
148
+ MICROSEC = "microsec"
149
+ WHEN = "when"
150
+ ERROR = "error"
151
+ ENDPOINT_ID = "endpoint_id"
152
+ SAMPLING_PERCENTAGE = "sampling_percentage"
153
+ EFFECTIVE_SAMPLE_COUNT = "effective_sample_count"
154
+ LABELS = "labels"
155
+ FUNCTION_URI = "function_uri"
156
+ REQUEST = "request"
157
+ RESPONSE = "resp"
158
+ METRICS = "metrics"
159
+
160
+
145
161
  class FeatureSetFeatures(MonitoringStrEnum):
146
162
  LATENCY = EventFieldType.LATENCY
147
163
  METRICS = EventFieldType.METRICS
@@ -148,6 +148,10 @@ class ProjectSummary(pydantic.v1.BaseModel):
148
148
  datasets_count: int = 0
149
149
  documents_count: int = 0
150
150
  llm_prompts_count: int = 0
151
+ running_model_monitoring_functions: int = 0
152
+ failed_model_monitoring_functions: int = 0
153
+ real_time_model_endpoint_count: int = 0
154
+ batch_model_endpoint_count: int = 0
151
155
 
152
156
 
153
157
  class IguazioProject(pydantic.v1.BaseModel):
@@ -33,7 +33,9 @@ class MonitoringData(StrEnum):
33
33
  INPUTS = "inputs"
34
34
  OUTPUTS = "outputs"
35
35
  INPUT_PATH = "input_path"
36
+ RESULT_PATH = "result_path"
36
37
  CREATION_STRATEGY = "creation_strategy"
37
38
  LABELS = "labels"
38
39
  MODEL_PATH = "model_path"
39
40
  MODEL_ENDPOINT_UID = "model_endpoint_uid"
41
+ MODEL_CLASS = "model_class"
mlrun/config.py CHANGED
@@ -107,6 +107,8 @@ default_config = {
107
107
  "submit_timeout": "280", # timeout when submitting a new k8s resource
108
108
  # runtimes cleanup interval in seconds
109
109
  "runtimes_cleanup_interval": "300",
110
+ "background_task_cleanup_interval": "86400", # 24 hours in seconds
111
+ "background_task_max_age": "21600", # 6 hours in seconds
110
112
  "monitoring": {
111
113
  "runs": {
112
114
  # runs monitoring interval in seconds
@@ -233,6 +235,7 @@ default_config = {
233
235
  "delete_function": "900",
234
236
  "model_endpoint_creation": "600",
235
237
  "model_endpoint_tsdb_leftovers": "900",
238
+ "terminate_pipeline": "300",
236
239
  },
237
240
  "runtimes": {
238
241
  "dask": "600",
@@ -638,6 +641,7 @@ default_config = {
638
641
  "offline_storage_path": "model-endpoints/{kind}",
639
642
  "parquet_batching_max_events": 10_000,
640
643
  "parquet_batching_timeout_secs": timedelta(minutes=1).total_seconds(),
644
+ "model_endpoint_creation_check_period": "15",
641
645
  },
642
646
  "secret_stores": {
643
647
  # Use only in testing scenarios (such as integration tests) to avoid using k8s for secrets (will use in-memory
@@ -896,11 +900,7 @@ class Config:
896
900
  return result
897
901
 
898
902
  def __setattr__(self, attr, value):
899
- # in order for the dbpath setter to work
900
- if attr == "dbpath":
901
- super().__setattr__(attr, value)
902
- else:
903
- self._cfg[attr] = value
903
+ self._cfg[attr] = value
904
904
 
905
905
  def __dir__(self):
906
906
  return list(self._cfg) + dir(self.__class__)
@@ -1244,23 +1244,6 @@ class Config:
1244
1244
  # since the property will need to be url, which exists in other structs as well
1245
1245
  return config.ui.url or config.ui_url
1246
1246
 
1247
- @property
1248
- def dbpath(self):
1249
- return self._dbpath
1250
-
1251
- @dbpath.setter
1252
- def dbpath(self, value):
1253
- self._dbpath = value
1254
- if value:
1255
- # importing here to avoid circular dependency
1256
- import mlrun.db
1257
-
1258
- # It ensures that SSL verification is set before establishing a connection
1259
- _configure_ssl_verification(self.httpdb.http.verify)
1260
-
1261
- # when dbpath is set we want to connect to it which will sync configuration from it to the client
1262
- mlrun.db.get_run_db(value, force_reconnect=True)
1263
-
1264
1247
  def is_api_running_on_k8s(self):
1265
1248
  # determine if the API service is attached to K8s cluster
1266
1249
  # when there is a cluster the .namespace is set
@@ -1436,6 +1419,12 @@ def _do_populate(env=None, skip_errors=False):
1436
1419
  _configure_ssl_verification(config.httpdb.http.verify)
1437
1420
  _validate_config(config)
1438
1421
 
1422
+ if config.dbpath:
1423
+ from mlrun.db import get_run_db
1424
+
1425
+ # when dbpath is set we want to connect to it which will sync configuration from it to the client
1426
+ get_run_db(config.dbpath, force_reconnect=True)
1427
+
1439
1428
 
1440
1429
  def _validate_config(config):
1441
1430
  try:
mlrun/datastore/utils.py CHANGED
@@ -236,9 +236,11 @@ class KafkaParameters:
236
236
  "partitions": "",
237
237
  "sasl": "",
238
238
  "worker_allocation_mode": "",
239
- "tls_enable": "", # for Nuclio with Confluent Kafka (Sarama client)
239
+ # for Nuclio with Confluent Kafka
240
+ "tls_enable": "",
240
241
  "tls": "",
241
242
  "new_topic": "",
243
+ "nuclio_annotations": "",
242
244
  }
243
245
  self._reference_dicts = (
244
246
  self._custom_attributes,
mlrun/db/base.py CHANGED
@@ -97,9 +97,6 @@ class RunDBInterface(ABC):
97
97
  uid: Optional[Union[str, list[str]]] = None,
98
98
  project: Optional[str] = None,
99
99
  labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
100
- state: Optional[
101
- mlrun.common.runtimes.constants.RunStates
102
- ] = None, # Backward compatibility
103
100
  states: Optional[list[mlrun.common.runtimes.constants.RunStates]] = None,
104
101
  sort: bool = True,
105
102
  iter: bool = False,
@@ -470,22 +467,6 @@ class RunDBInterface(ABC):
470
467
  ) -> mlrun.common.schemas.FeaturesOutputV2:
471
468
  pass
472
469
 
473
- # TODO: remove in 1.10.0
474
- @deprecated(
475
- version="1.7.0",
476
- reason="'list_entities' will be removed in 1.10.0, use 'list_entities_v2' instead",
477
- category=FutureWarning,
478
- )
479
- @abstractmethod
480
- def list_entities(
481
- self,
482
- project: str,
483
- name: Optional[str] = None,
484
- tag: Optional[str] = None,
485
- labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
486
- ) -> mlrun.common.schemas.EntitiesOutput:
487
- pass
488
-
489
470
  @abstractmethod
490
471
  def list_entities_v2(
491
472
  self,