mlrun 1.10.0rc40__py3-none-any.whl → 1.11.0rc16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic.

Files changed (150)
  1. mlrun/__init__.py +3 -2
  2. mlrun/__main__.py +0 -4
  3. mlrun/artifacts/dataset.py +2 -2
  4. mlrun/artifacts/plots.py +1 -1
  5. mlrun/{model_monitoring/db/tsdb/tdengine → auth}/__init__.py +2 -3
  6. mlrun/auth/nuclio.py +89 -0
  7. mlrun/auth/providers.py +429 -0
  8. mlrun/auth/utils.py +415 -0
  9. mlrun/common/constants.py +7 -0
  10. mlrun/common/model_monitoring/helpers.py +41 -4
  11. mlrun/common/runtimes/constants.py +28 -0
  12. mlrun/common/schemas/__init__.py +13 -3
  13. mlrun/common/schemas/alert.py +2 -2
  14. mlrun/common/schemas/api_gateway.py +3 -0
  15. mlrun/common/schemas/auth.py +10 -10
  16. mlrun/common/schemas/client_spec.py +4 -0
  17. mlrun/common/schemas/constants.py +25 -0
  18. mlrun/common/schemas/frontend_spec.py +1 -8
  19. mlrun/common/schemas/function.py +24 -0
  20. mlrun/common/schemas/hub.py +3 -2
  21. mlrun/common/schemas/model_monitoring/__init__.py +1 -1
  22. mlrun/common/schemas/model_monitoring/constants.py +2 -2
  23. mlrun/common/schemas/secret.py +17 -2
  24. mlrun/common/secrets.py +95 -1
  25. mlrun/common/types.py +10 -10
  26. mlrun/config.py +53 -15
  27. mlrun/data_types/infer.py +2 -2
  28. mlrun/datastore/__init__.py +2 -3
  29. mlrun/datastore/base.py +274 -10
  30. mlrun/datastore/datastore.py +1 -1
  31. mlrun/datastore/datastore_profile.py +49 -17
  32. mlrun/datastore/model_provider/huggingface_provider.py +6 -2
  33. mlrun/datastore/model_provider/model_provider.py +2 -2
  34. mlrun/datastore/model_provider/openai_provider.py +2 -2
  35. mlrun/datastore/s3.py +15 -16
  36. mlrun/datastore/sources.py +1 -1
  37. mlrun/datastore/store_resources.py +4 -4
  38. mlrun/datastore/storeytargets.py +16 -10
  39. mlrun/datastore/targets.py +1 -1
  40. mlrun/datastore/utils.py +16 -3
  41. mlrun/datastore/v3io.py +1 -1
  42. mlrun/db/base.py +36 -12
  43. mlrun/db/httpdb.py +316 -101
  44. mlrun/db/nopdb.py +29 -11
  45. mlrun/errors.py +4 -2
  46. mlrun/execution.py +11 -12
  47. mlrun/feature_store/api.py +1 -1
  48. mlrun/feature_store/common.py +1 -1
  49. mlrun/feature_store/feature_vector_utils.py +1 -1
  50. mlrun/feature_store/steps.py +8 -6
  51. mlrun/frameworks/_common/utils.py +3 -3
  52. mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
  53. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -1
  54. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
  55. mlrun/frameworks/_ml_common/utils.py +2 -1
  56. mlrun/frameworks/auto_mlrun/auto_mlrun.py +4 -3
  57. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +2 -1
  58. mlrun/frameworks/onnx/dataset.py +2 -1
  59. mlrun/frameworks/onnx/mlrun_interface.py +2 -1
  60. mlrun/frameworks/pytorch/callbacks/logging_callback.py +5 -4
  61. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +2 -1
  62. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +2 -1
  63. mlrun/frameworks/pytorch/utils.py +2 -1
  64. mlrun/frameworks/sklearn/metric.py +2 -1
  65. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +5 -4
  66. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +2 -1
  67. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +2 -1
  68. mlrun/hub/__init__.py +37 -0
  69. mlrun/hub/base.py +142 -0
  70. mlrun/hub/module.py +67 -76
  71. mlrun/hub/step.py +113 -0
  72. mlrun/launcher/base.py +2 -1
  73. mlrun/launcher/local.py +2 -1
  74. mlrun/model.py +12 -2
  75. mlrun/model_monitoring/__init__.py +0 -1
  76. mlrun/model_monitoring/api.py +2 -2
  77. mlrun/model_monitoring/applications/base.py +20 -6
  78. mlrun/model_monitoring/applications/context.py +1 -0
  79. mlrun/model_monitoring/controller.py +7 -17
  80. mlrun/model_monitoring/db/_schedules.py +2 -16
  81. mlrun/model_monitoring/db/_stats.py +2 -13
  82. mlrun/model_monitoring/db/tsdb/__init__.py +9 -7
  83. mlrun/model_monitoring/db/tsdb/base.py +2 -4
  84. mlrun/model_monitoring/db/tsdb/preaggregate.py +234 -0
  85. mlrun/model_monitoring/db/tsdb/stream_graph_steps.py +63 -0
  86. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_metrics_queries.py +414 -0
  87. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_predictions_queries.py +376 -0
  88. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_results_queries.py +590 -0
  89. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connection.py +434 -0
  90. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connector.py +541 -0
  91. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_operations.py +808 -0
  92. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_schema.py +502 -0
  93. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream.py +163 -0
  94. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream_graph_steps.py +60 -0
  95. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_dataframe_processor.py +141 -0
  96. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_query_builder.py +585 -0
  97. mlrun/model_monitoring/db/tsdb/timescaledb/writer_graph_steps.py +73 -0
  98. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +4 -6
  99. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +147 -79
  100. mlrun/model_monitoring/features_drift_table.py +2 -1
  101. mlrun/model_monitoring/helpers.py +2 -1
  102. mlrun/model_monitoring/stream_processing.py +18 -16
  103. mlrun/model_monitoring/writer.py +4 -3
  104. mlrun/package/__init__.py +2 -1
  105. mlrun/platforms/__init__.py +0 -44
  106. mlrun/platforms/iguazio.py +1 -1
  107. mlrun/projects/operations.py +11 -10
  108. mlrun/projects/project.py +81 -82
  109. mlrun/run.py +4 -7
  110. mlrun/runtimes/__init__.py +2 -204
  111. mlrun/runtimes/base.py +89 -21
  112. mlrun/runtimes/constants.py +225 -0
  113. mlrun/runtimes/daskjob.py +4 -2
  114. mlrun/runtimes/databricks_job/databricks_runtime.py +2 -1
  115. mlrun/runtimes/mounts.py +5 -0
  116. mlrun/runtimes/nuclio/__init__.py +12 -8
  117. mlrun/runtimes/nuclio/api_gateway.py +36 -6
  118. mlrun/runtimes/nuclio/application/application.py +200 -32
  119. mlrun/runtimes/nuclio/function.py +154 -49
  120. mlrun/runtimes/nuclio/serving.py +55 -42
  121. mlrun/runtimes/pod.py +59 -10
  122. mlrun/secrets.py +46 -2
  123. mlrun/serving/__init__.py +2 -0
  124. mlrun/serving/remote.py +5 -5
  125. mlrun/serving/routers.py +3 -3
  126. mlrun/serving/server.py +46 -43
  127. mlrun/serving/serving_wrapper.py +6 -2
  128. mlrun/serving/states.py +554 -207
  129. mlrun/serving/steps.py +1 -1
  130. mlrun/serving/system_steps.py +42 -33
  131. mlrun/track/trackers/mlflow_tracker.py +29 -31
  132. mlrun/utils/helpers.py +89 -16
  133. mlrun/utils/http.py +9 -2
  134. mlrun/utils/notifications/notification/git.py +1 -1
  135. mlrun/utils/notifications/notification/mail.py +39 -16
  136. mlrun/utils/notifications/notification_pusher.py +2 -2
  137. mlrun/utils/version/version.json +2 -2
  138. mlrun/utils/version/version.py +3 -4
  139. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/METADATA +39 -49
  140. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/RECORD +144 -130
  141. mlrun/db/auth_utils.py +0 -152
  142. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +0 -343
  143. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -75
  144. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +0 -281
  145. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +0 -1368
  146. mlrun/model_monitoring/db/tsdb/tdengine/writer_graph_steps.py +0 -51
  147. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/WHEEL +0 -0
  148. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/entry_points.txt +0 -0
  149. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/licenses/LICENSE +0 -0
  150. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/top_level.txt +0 -0
mlrun/hub/module.py CHANGED
@@ -12,76 +12,62 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-import os
-import subprocess
-import sys
+import warnings
 from pathlib import Path
 from typing import Optional, Union
 
 import yaml
+from deprecated import deprecated
 
 import mlrun.common.types
 import mlrun.utils
 from mlrun.common.schemas.hub import HubModuleType, HubSourceType
-from mlrun.run import function_to_module, get_object
-from mlrun.utils import logger
+from mlrun.run import get_object
 
-from ..errors import MLRunBadRequestError
-from ..model import ModelObj
 from ..utils import extend_hub_uri_if_needed
+from .base import HubAsset
 
 
-class HubModule(ModelObj):
+class HubModule(HubAsset):
+    ASSET_TYPE = HubSourceType.modules
+
     def __init__(
         self,
         name: str,
+        version: str,
         kind: Union[HubModuleType, str],
-        version: Optional[str] = None,
         description: Optional[str] = None,
         categories: Optional[list] = None,
         requirements: Optional[list] = None,
-        local_path: Optional[str] = None,
+        local_path: Optional[Path] = None,
         filename: Optional[str] = None,
         example: Optional[str] = None,
         url: Optional[str] = None,
         **kwargs,  # catch all for unused args
     ):
-        self.name: str = name
-        self.version: str = version
-        self.kind: HubModuleType = kind
-        self.description: str = description or ""
-        self.categories: list = categories or []
-        self.requirements: list = requirements or []
-        self.local_path: str = local_path or ""
-        self.filename: str = filename or name + ".py"
-        self.example: str = example or ""
-        self.url: str = url or ""
-
-    def module(self):
-        """Import the module after downloading its fils to local_path"""
-        try:
-            return function_to_module(code=self.filename, workdir=self.local_path)
-        except FileNotFoundError:
-            searched_path = self.local_path or "./"
-            raise FileNotFoundError(
-                f"Module file {self.filename} not found in {searched_path}, try calling download_module_files() first"
-            )
-
-    def install_requirements(self) -> None:
-        """
-        Install pip-style requirements (e.g., ["pandas>=2.0.0", "requests==2.31.0"]).
-        """
-        for req in self.requirements:
-            logger.info(f"Installing {req} ...")
-            try:
-                subprocess.run(
-                    [sys.executable, "-m", "pip", "install", req], check=True, text=True
-                )
-                logger.info(f"Installed {req}")
-            except subprocess.CalledProcessError as e:
-                logger.error(f"Failed to install {req} (exit code {e.returncode})")
-
-    def download_module_files(self, local_path=None, secrets=None):
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            categories=categories,
+            requirements=requirements,
+            local_path=local_path,
+            filename=filename,
+            example=example,
+            url=url,
+        )
+        self.kind = kind
+
+    # TODO: Remove this in 1.13.0
+    @deprecated(
+        version="1.11.0",
+        reason="This function is deprecated and will be removed in 1.13. You can download module files by calling "
+        "download_files() instead.",
+        category=FutureWarning,
+    )
+    def download_module_files(
+        self, local_path: Optional[str] = None, secrets: Optional[dict] = None
+    ):
         """
         Download this hub module's files (code file and, if available, an example notebook) to the target directory
         specified by `local_path` (defaults to the current working directory).
@@ -89,52 +75,50 @@ class HubModule(ModelObj):
         """
         self.local_path = self.verify_directory(path=local_path)
         source_url, _ = extend_hub_uri_if_needed(
-            uri=self.url, asset_type=HubSourceType.modules, file=self.filename
+            uri=self.url, asset_type=self.ASSET_TYPE, file=self.filename
         )
         self._download_object(
             obj_url=source_url, target_name=self.filename, secrets=secrets
         )
         if self.example:
             example_url, _ = extend_hub_uri_if_needed(
-                uri=self.url, asset_type=HubSourceType.modules, file=self.example
+                uri=self.url, asset_type=self.ASSET_TYPE, file=self.example
             )
             self._download_object(
                 obj_url=example_url, target_name=self.example, secrets=secrets
             )
 
-    def _download_object(self, obj_url, target_name, secrets=None):
-        data = get_object(url=obj_url, secrets=secrets)
-        target_dir = self.local_path if self.local_path is not None else os.getcwd()
-        target_filepath = os.path.join(target_dir, target_name)
-        with open(target_filepath, "wb") as f:
-            f.write(data)
-
-    @staticmethod
-    def verify_directory(path: Optional[str] = None) -> Path:
+    def download_files(
+        self,
+        local_path: Optional[str] = None,
+        download_example: bool = True,
+    ):
         """
-        Validate that the given path is an existing directory.
-        If no path has been provided, returns current working directory.
+        Download this hub module's code file.
+        :param local_path: Target directory to download the module files to. Defaults to the current working directory.
+            This path will be used to locate the code file when importing it as a module.
+        :param download_example: Whether to download the example notebook if available. Defaults to True.
         """
-        if path:
-            path = Path(path)
-            if not path.exists():
-                raise ValueError(f"Path does not exist: {path}")
-            if not path.is_dir():
-                raise ValueError(f"Path is not a directory: {path}")
-            return path
-        return Path(os.getcwd())
+        super().download_files(
+            local_path=local_path,
+            download_example=download_example,
+        )
 
+    # TODO: Remove this in 1.13.0
+    @deprecated(
+        version="1.11.0",
+        reason="This function is deprecated and will be removed in 1.13. You can get the module source file path by"
+        " calling get_src_file_path() instead.",
+        category=FutureWarning,
+    )
     def get_module_file_path(self):
-        if not self.local_path:
-            raise MLRunBadRequestError(
-                "module files haven't been downloaded yet, try calling download_module_files() first"
-            )
-        return str(Path(self.local_path) / self.filename)
+        """Get the full path to the module's code file."""
+        return super().get_src_file_path()
 
 
 def get_hub_module(
-    url: str = "",
-    download_files: Optional[bool] = True,
+    url: str,
+    download_files: bool = True,
     secrets: Optional[dict] = None,
     local_path: Optional[str] = None,
 ) -> HubModule:
@@ -158,11 +142,18 @@ def get_hub_module(
     spec = item_yaml.pop("spec", {})
     hub_module = HubModule(**item_yaml, **spec, url=url)
     if download_files:
-        hub_module.download_module_files(local_path=local_path, secrets=secrets)
+        with warnings.catch_warnings():
+            warnings.filterwarnings("ignore", category=FutureWarning)
+            hub_module.download_module_files(local_path=local_path, secrets=secrets)
     return hub_module
 
 
-def import_module(url="", install_requirements=False, secrets=None, local_path=None):
+def import_module(
+    url: str,
+    install_requirements: bool = False,
+    secrets: Optional[dict] = None,
+    local_path: Optional[str] = None,
+):
     """
     Import a module from the hub to use directly.
     :param url: hub module url in the format "hub://[<source>/]<item-name>[:<tag>]"
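The net effect of this diff is that the shared download and path logic now lives in the new HubAsset base class (mlrun/hub/base.py), while download_module_files() and get_module_file_path() remain only as deprecated wrappers until 1.13. A short usage sketch of the updated API follows; the hub item name is a placeholder and get_src_file_path() is assumed to be provided by HubAsset, as the deprecation message suggests:

# Usage sketch only: "hub://data_prep" is a placeholder item name, and
# get_src_file_path() is assumed to come from the new HubAsset base class.
from mlrun.hub.module import get_hub_module, import_module

# Fetch the item metadata and download its code file (and example notebook, if any)
hub_module = get_hub_module("hub://data_prep", download_files=True)
print(hub_module.get_src_file_path())  # replaces the deprecated get_module_file_path()

# Or resolve, download, and import the module in one call
mod = import_module("hub://data_prep", install_requirements=False)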
mlrun/hub/step.py ADDED
@@ -0,0 +1,113 @@
+# Copyright 2025 Iguazio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from pathlib import Path
+from typing import Optional
+
+import yaml
+
+from mlrun.common.schemas.hub import HubSourceType
+from mlrun.run import get_object
+
+from ..errors import MLRunInvalidArgumentError
+from ..utils import extend_hub_uri_if_needed
+from .base import HubAsset
+
+
+class HubStep(HubAsset):
+    ASSET_TYPE = HubSourceType.steps
+
+    def __init__(
+        self,
+        name: str,
+        version: str,
+        class_name: str,
+        default_handler: str,
+        description: Optional[str] = None,
+        categories: Optional[list] = None,
+        requirements: Optional[list] = None,
+        local_path: Optional[Path] = None,
+        filename: Optional[str] = None,
+        example: Optional[str] = None,
+        url: Optional[str] = None,
+        **kwargs,  # catch all for unused args
+    ):
+        super().__init__(
+            name=name,
+            version=version,
+            description=description,
+            categories=categories,
+            requirements=requirements,
+            local_path=local_path,
+            filename=filename,
+            example=example,
+            url=url,
+        )
+        self.class_name = class_name
+        self.default_handler = default_handler
+
+    def download_files(
+        self,
+        local_path: Optional[str] = None,
+        download_example: bool = False,
+    ):
+        """
+        Download this step's code file.
+        :param local_path: Target directory to download the step files to. Defaults to the current working directory.
+            This path will be used to locate the code file when importing it as a python module.
+        :param download_example: Whether to download the example notebook if available. Defaults to False.
+        """
+        super().download_files(
+            local_path=local_path,
+            download_example=download_example,
+        )
+
+
+def get_hub_step(
+    url: str,
+    local_path: Optional[str] = None,
+    download_files: bool = True,
+    include_example: bool = False,
+) -> HubStep:
+    """
+    Get a hub-step object containing metadata of the requested step.
+    :param url: Hub step url in the format "hub://[<source>/]<item-name>[:<tag>]"
+    :param local_path: Path to target directory for the step files. Ignored when download_files is set to False.
+        Defaults to the current working directory.
+    :param download_files: When set to True, the step code files are downloaded
+    :param include_example: When set to True, the example notebook will also be downloaded (ignored if download_files
+        is False)
+
+    :return: HubStep object
+    """
+    item_yaml_url, is_hub_uri = extend_hub_uri_if_needed(
+        uri=url, asset_type=HubSourceType.steps, file="item.yaml"
+    )
+    if not is_hub_uri:
+        raise MLRunInvalidArgumentError("Not a valid hub URL")
+    yaml_obj = get_object(url=item_yaml_url)
+    item_yaml = yaml.safe_load(yaml_obj)
+    spec = item_yaml.pop("spec", {})
+    class_name = item_yaml.pop("className", "")
+    default_handler = item_yaml.pop("defaultHandler", "")
+    hub_step = HubStep(
+        **item_yaml,
+        **spec,
+        class_name=class_name,
+        default_handler=default_handler,
+        url=url,
+    )
+    if download_files:
+        hub_step.download_files(local_path=local_path, download_example=include_example)
+    return hub_step
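A matching sketch for the new step helper, based only on the signatures shown above; the hub URL is a placeholder, and the get_src_file_path() call assumes the HubAsset base class behaves as in the module case:

from mlrun.hub.step import get_hub_step

# Download the step's code file to the current directory and inspect its metadata
step = get_hub_step("hub://my_step", download_files=True, include_example=False)
print(step.class_name, step.default_handler)
print(step.get_src_file_path())  # assumed HubAsset helper, as referenced in module.py above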
mlrun/launcher/base.py CHANGED
@@ -16,7 +16,8 @@ import ast
 import copy
 import os
 import uuid
-from typing import Any, Callable, Optional, Union
+from collections.abc import Callable
+from typing import Any, Optional, Union
 
 import mlrun.common.constants
 import mlrun.common.runtimes.constants
mlrun/launcher/local.py CHANGED
@@ -13,8 +13,9 @@
 # limitations under the License.
 import os
 import pathlib
+from collections.abc import Callable
 from os import environ
-from typing import Callable, Optional, Union
+from typing import Optional, Union
 
 import mlrun.common.constants as mlrun_constants
 import mlrun.common.schemas.schedule
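Both launcher modules now import Callable from collections.abc rather than typing, where it has been a deprecated alias since Python 3.9; the two spellings are interchangeable in annotations, as this minimal sketch shows:

from collections.abc import Callable

# collections.abc.Callable supports the same subscription syntax as typing.Callable
Handler = Callable[[str], int]

def run(handler: Handler, payload: str) -> int:
    return handler(payload)

print(run(len, "mlrun"))  # 5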
mlrun/model.py CHANGED
@@ -29,6 +29,7 @@ import pydantic.v1.error_wrappers
 import mlrun
 import mlrun.common.constants as mlrun_constants
 import mlrun.common.schemas.notification
+import mlrun.common.secrets
 import mlrun.utils.regex
 
 from .utils import (
@@ -235,7 +236,9 @@ class ModelObj:
         fields = list(inspect.signature(cls.__init__).parameters.keys())
 
         if init_with_params:
-            kwargs = {field: struct.pop(field, None) for field in fields}
+            kwargs = {
+                field: struct.pop(field, None) for field in fields if field in struct
+            }
             kwargs.pop("self", None)
             new_obj = cls(**kwargs)
         else:
@@ -1007,6 +1010,7 @@ class RunSpec(ModelObj):
         tolerations=None,
         affinity=None,
         retry=None,
+        auth=None,
     ):
         # A dictionary of parsing configurations that will be read from the inputs the user set. The keys are the inputs
         # keys (parameter names) and the values are the type hint given in the input keys after the colon.
@@ -1048,6 +1052,7 @@ class RunSpec(ModelObj):
         self.tolerations = tolerations or {}
         self.affinity = affinity or {}
         self.retry = retry or {}
+        self.auth = auth or {}
 
     def _serialize_field(
         self, struct: dict, field_name: Optional[str] = None, strip: bool = False
@@ -1616,7 +1621,12 @@ class RunTemplate(ModelObj):
 
         :returns: The RunTemplate object
         """
-
+        if kind == "azure_vault" and isinstance(source, dict):
+            candidate_secret_name = (source.get("k8s_secret") or "").strip()
+            if candidate_secret_name:
+                mlrun.common.secrets.validate_not_forbidden_secret(
+                    candidate_secret_name
+                )
         if kind == "vault" and isinstance(source, list):
             source = {"project": self.metadata.project, "secrets": source}
 
mlrun/model_monitoring/__init__.py CHANGED
@@ -15,5 +15,4 @@
 from mlrun.common.schemas import ModelEndpoint, ModelEndpointList
 
 from .db import get_tsdb_connector
-from .db._schedules import delete_model_monitoring_schedules_user_folder
 from .helpers import get_stream_path
mlrun/model_monitoring/api.py CHANGED
@@ -467,7 +467,7 @@ def read_dataset_as_dataframe(
         # Get the features and parse to DataFrame:
         dataset = dataset.get_offline_features(drop_columns=drop_columns).to_dataframe()
 
-    elif isinstance(dataset, (list, np.ndarray)):
+    elif isinstance(dataset, list | np.ndarray):
         if not feature_columns:
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "Feature columns list must be provided when dataset input as from type list or numpy array"
@@ -509,7 +509,7 @@
     # Turn the `label_columns` into a list by default:
     if label_columns is None:
         label_columns = []
-    elif isinstance(label_columns, (str, int)):
+    elif isinstance(label_columns, str | int):
         label_columns = [label_columns]
 
     return dataset, label_columns
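Both hunks switch isinstance checks from a tuple of types to a PEP 604 union, which isinstance accepts natively on Python 3.10 and later; the two forms are equivalent at runtime:

# Equivalent runtime checks on Python 3.10+; the union form matches the new code style.
for value in ([1, 2, 3], "text", 7):
    assert isinstance(value, (list, tuple)) is isinstance(value, list | tuple)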
mlrun/model_monitoring/applications/base.py CHANGED
@@ -18,7 +18,7 @@ from abc import ABC, abstractmethod
 from collections import defaultdict
 from collections.abc import Iterator
 from contextlib import contextmanager, nullcontext
-from datetime import datetime, timedelta, timezone
+from datetime import UTC, datetime, timedelta
 from typing import Any, Literal, Optional, Union, cast
 
 import pandas as pd
@@ -440,7 +440,7 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
     ) -> list[tuple[str, str]]:
         if isinstance(endpoints, list):
             if all(
-                isinstance(endpoint, (tuple, list)) and len(endpoint) == 2
+                isinstance(endpoint, tuple | list) and len(endpoint) == 2
                 for endpoint in endpoints
             ):
                 # A list of [(name, uid), ...] / [[name, uid], ...] tuples/lists
@@ -690,8 +690,8 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
 
         # If `start_dt` and `end_dt` do not include time zone information - change them to UTC
         if (start_dt.tzinfo is None) and (end_dt.tzinfo is None):
-            start_dt = start_dt.replace(tzinfo=timezone.utc)
-            end_dt = end_dt.replace(tzinfo=timezone.utc)
+            start_dt = start_dt.replace(tzinfo=UTC)
+            end_dt = end_dt.replace(tzinfo=UTC)
         elif (start_dt.tzinfo is None) or (end_dt.tzinfo is None):
             raise mlrun.errors.MLRunValueError(
                 "The start and end times must either both include time zone information or both be naive (no time "
@@ -850,6 +850,11 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
         * ``base_period``, ``int``
         * ``write_output``, ``bool``
        * ``existing_data_handling``, ``str``
+        * ``_init_args``, ``dict`` - the arguments for the application class constructor
+          (equivalent to ``class_arguments``)
+
+        See :py:meth:`~ModelMonitoringApplicationBase.evaluate` for more details
+        about these inputs and params.
 
         For Git sources, add the source archive to the returned job and change the handler:
 
@@ -928,6 +933,7 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
         image: Optional[str] = None,
         with_repo: Optional[bool] = False,
         class_handler: Optional[str] = None,
+        class_arguments: Optional[dict[str, Any]] = None,
         requirements: Optional[Union[str, list[str]]] = None,
         requirements_file: str = "",
         endpoints: Union[list[tuple[str, str]], list[str], Literal["all"], None] = None,
@@ -963,7 +969,10 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
             You do not need to have a model endpoint to use this option.
         :param image: Docker image to run the job on (when running remotely).
         :param with_repo: Whether to clone the current repo to the build source.
-        :param class_handler: The relative path to the class, useful when using Git sources or code from images.
+        :param class_handler: The relative path to the application class, useful when using Git sources or code
+            from images.
+        :param class_arguments: The arguments for the application class constructor. These are passed to the
+            class ``__init__``. The values must be JSON-serializable.
         :param requirements: List of Python requirements to be installed in the image.
         :param requirements_file: Path to a Python requirements file to be installed in the image.
         :param endpoints: The model endpoints to get the data from. The options are:
@@ -1041,7 +1050,9 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
             project=project,
         )
 
-        params: dict[str, Union[list, str, int, None, ds_profile.DatastoreProfile]] = {}
+        params: dict[
+            str, Union[list, dict, str, int, None, ds_profile.DatastoreProfile]
+        ] = {}
         if endpoints:
             params["endpoints"] = endpoints
         if sample_data is None:
@@ -1077,6 +1088,9 @@ class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
             )
             params["stream_profile"] = stream_profile
 
+        if class_arguments:
+            params["_init_args"] = class_arguments
+
         inputs: dict[str, str] = {}
         for data, identifier in [
             (sample_data, "sample_data"),
mlrun/model_monitoring/applications/context.py CHANGED
@@ -249,6 +249,7 @@ class MonitoringApplicationContext:
                 project=self.project_name,
                 endpoint_id=self.endpoint_id,
                 feature_analysis=True,
+                tsdb_metrics=False,
             )
         return self._model_endpoint
 
mlrun/model_monitoring/controller.py CHANGED
@@ -139,12 +139,10 @@ class _BatchWindow:
             self._start, self._stop - self._step + 1, self._step
         ):
             entered = True
-            start_time = datetime.datetime.fromtimestamp(
-                timestamp, tz=datetime.timezone.utc
-            )
+            start_time = datetime.datetime.fromtimestamp(timestamp, tz=datetime.UTC)
             end_time = datetime.datetime.fromtimestamp(
                 timestamp - self.TIMESTAMP_RESOLUTION_MICRO + self._step,
-                tz=datetime.timezone.utc,
+                tz=datetime.UTC,
             )
             yield _Interval(start_time, end_time)
 
@@ -164,23 +162,15 @@
                 # If the last analyzed time is earlier than the stop time,
                 # yield the final partial interval from last_analyzed to stop
                 yield _Interval(
-                    datetime.datetime.fromtimestamp(
-                        last_analyzed, tz=datetime.timezone.utc
-                    ),
-                    datetime.datetime.fromtimestamp(
-                        self._stop, tz=datetime.timezone.utc
-                    ),
+                    datetime.datetime.fromtimestamp(last_analyzed, tz=datetime.UTC),
+                    datetime.datetime.fromtimestamp(self._stop, tz=datetime.UTC),
                 )
             else:
                 # The time span between the start and end of the batch is shorter than the step,
                 # so we need to yield a partial interval covering that range.
                 yield _Interval(
-                    datetime.datetime.fromtimestamp(
-                        self._start, tz=datetime.timezone.utc
-                    ),
-                    datetime.datetime.fromtimestamp(
-                        self._stop, tz=datetime.timezone.utc
-                    ),
+                    datetime.datetime.fromtimestamp(self._start, tz=datetime.UTC),
+                    datetime.datetime.fromtimestamp(self._stop, tz=datetime.UTC),
                 )
 
         self._update_last_analyzed(last_analyzed=self._stop)
@@ -866,7 +856,7 @@ class MonitoringApplicationController:
             last_request = last_request_dict.get(endpoint.metadata.uid, None)
             if isinstance(last_request, float):
                 last_request = datetime.datetime.fromtimestamp(
-                    last_request, tz=datetime.timezone.utc
+                    last_request, tz=datetime.UTC
                 )
             elif isinstance(last_request, pd.Timestamp):
                 last_request = last_request.to_pydatetime()
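As in applications/base.py above, the controller now uses datetime.UTC, the alias for datetime.timezone.utc added in Python 3.11, so these replacements do not change behavior:

import datetime

# datetime.UTC (Python 3.11+) is simply an alias for datetime.timezone.utc
assert datetime.UTC is datetime.timezone.utc
print(datetime.datetime.fromtimestamp(0, tz=datetime.UTC))  # 1970-01-01 00:00:00+00:00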
mlrun/model_monitoring/db/_schedules.py CHANGED
@@ -13,15 +13,12 @@
 # limitations under the License.
 
 import json
-import sys
 from abc import ABC, abstractmethod
 from contextlib import AbstractContextManager
 from datetime import datetime
 from types import TracebackType
 from typing import TYPE_CHECKING, Final, Optional
 
-import botocore.exceptions
-
 import mlrun
 import mlrun.common.schemas as schemas
 import mlrun.errors
@@ -30,10 +27,7 @@ import mlrun.utils.helpers
 from mlrun.utils import logger
 
 if TYPE_CHECKING:
-    if sys.version_info >= (3, 11):
-        from typing import Self
-    else:
-        from typing_extensions import Self
+    from typing import Self
 
 
 class ModelMonitoringSchedulesFileBase(AbstractContextManager, ABC):
@@ -88,16 +82,8 @@ class ModelMonitoringSchedulesFileBase(AbstractContextManager, ABC):
         except (
             mlrun.errors.MLRunNotFoundError,
             # Different errors are raised for S3 or local storage, see ML-8042
-            botocore.exceptions.ClientError,
             FileNotFoundError,
-        ) as err:
-            if (
-                isinstance(err, botocore.exceptions.ClientError)
-                # Add a log only to "NoSuchKey" errors codes - equivalent to `FileNotFoundError`
-                and err.response["Error"]["Code"] != "NoSuchKey"
-            ):
-                raise
-
+        ):
             logger.exception(
                 "The schedules file was not found. It should have been created "
                 "as a part of the model endpoint's creation",
mlrun/model_monitoring/db/_stats.py CHANGED
@@ -15,10 +15,9 @@ import abc
 import json
 import typing
 from abc import abstractmethod
-from datetime import datetime, timezone
+from datetime import UTC, datetime
 from typing import cast
 
-import botocore.exceptions
 import fsspec
 
 import mlrun.datastore.base
@@ -83,23 +82,13 @@ class ModelMonitoringStatsFile(abc.ABC):
             content = json.loads(self._item.get().decode())
             timestamp = content.get("timestamp")
             if timestamp is not None:
-                timestamp = datetime.fromisoformat(timestamp).astimezone(
-                    tz=timezone.utc
-                )
+                timestamp = datetime.fromisoformat(timestamp).astimezone(tz=UTC)
             return content.get("data"), timestamp
         except (
             mlrun.errors.MLRunNotFoundError,
             # Different errors are raised for S3 or local storage, see ML-8042
-            botocore.exceptions.ClientError,
             FileNotFoundError,
         ) as err:
-            if (
-                isinstance(err, botocore.exceptions.ClientError)
-                # Add a log only to "NoSuchKey" errors codes - equivalent to `FileNotFoundError`
-                and err.response["Error"]["Code"] != "NoSuchKey"
-            ):
-                raise
-
             logger.warning(
                 "The Stats file was not found. It should have been created "
                 "as a part of the model endpoint's creation",