mlrun 1.7.0rc43__py3-none-any.whl → 1.7.0rc55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (68)
  1. mlrun/__main__.py +4 -2
  2. mlrun/artifacts/manager.py +3 -1
  3. mlrun/common/formatters/__init__.py +1 -0
  4. mlrun/{model_monitoring/application.py → common/formatters/feature_set.py} +20 -6
  5. mlrun/common/formatters/run.py +3 -0
  6. mlrun/common/schemas/__init__.py +1 -0
  7. mlrun/common/schemas/alert.py +11 -11
  8. mlrun/common/schemas/auth.py +5 -0
  9. mlrun/common/schemas/client_spec.py +0 -1
  10. mlrun/common/schemas/model_monitoring/__init__.py +2 -1
  11. mlrun/common/schemas/model_monitoring/constants.py +23 -9
  12. mlrun/common/schemas/model_monitoring/model_endpoints.py +24 -47
  13. mlrun/common/schemas/notification.py +12 -2
  14. mlrun/common/schemas/workflow.py +10 -2
  15. mlrun/config.py +28 -21
  16. mlrun/data_types/data_types.py +6 -1
  17. mlrun/datastore/base.py +4 -4
  18. mlrun/datastore/s3.py +12 -9
  19. mlrun/datastore/storeytargets.py +9 -6
  20. mlrun/db/base.py +3 -0
  21. mlrun/db/httpdb.py +28 -16
  22. mlrun/db/nopdb.py +24 -4
  23. mlrun/errors.py +7 -1
  24. mlrun/execution.py +40 -7
  25. mlrun/feature_store/api.py +1 -0
  26. mlrun/feature_store/retrieval/spark_merger.py +7 -7
  27. mlrun/frameworks/_common/plan.py +3 -3
  28. mlrun/frameworks/_ml_common/plan.py +1 -1
  29. mlrun/frameworks/parallel_coordinates.py +2 -3
  30. mlrun/launcher/client.py +6 -6
  31. mlrun/model.py +29 -0
  32. mlrun/model_monitoring/api.py +1 -12
  33. mlrun/model_monitoring/applications/__init__.py +1 -2
  34. mlrun/model_monitoring/applications/_application_steps.py +5 -1
  35. mlrun/model_monitoring/applications/base.py +2 -182
  36. mlrun/model_monitoring/applications/context.py +2 -9
  37. mlrun/model_monitoring/applications/evidently_base.py +0 -74
  38. mlrun/model_monitoring/applications/histogram_data_drift.py +2 -2
  39. mlrun/model_monitoring/applications/results.py +4 -4
  40. mlrun/model_monitoring/controller.py +46 -209
  41. mlrun/model_monitoring/db/stores/base/store.py +1 -0
  42. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +15 -1
  43. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +12 -0
  44. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +17 -16
  45. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +49 -39
  46. mlrun/model_monitoring/helpers.py +13 -15
  47. mlrun/model_monitoring/writer.py +3 -1
  48. mlrun/projects/operations.py +11 -8
  49. mlrun/projects/pipelines.py +35 -16
  50. mlrun/projects/project.py +52 -24
  51. mlrun/render.py +3 -3
  52. mlrun/runtimes/daskjob.py +1 -1
  53. mlrun/runtimes/kubejob.py +6 -6
  54. mlrun/runtimes/nuclio/api_gateway.py +12 -0
  55. mlrun/runtimes/nuclio/application/application.py +3 -3
  56. mlrun/runtimes/nuclio/function.py +41 -0
  57. mlrun/runtimes/nuclio/serving.py +2 -2
  58. mlrun/runtimes/pod.py +19 -13
  59. mlrun/serving/server.py +2 -0
  60. mlrun/utils/helpers.py +62 -16
  61. mlrun/utils/version/version.json +2 -2
  62. {mlrun-1.7.0rc43.dist-info → mlrun-1.7.0rc55.dist-info}/METADATA +126 -44
  63. {mlrun-1.7.0rc43.dist-info → mlrun-1.7.0rc55.dist-info}/RECORD +67 -68
  64. {mlrun-1.7.0rc43.dist-info → mlrun-1.7.0rc55.dist-info}/WHEEL +1 -1
  65. mlrun/model_monitoring/evidently_application.py +0 -20
  66. {mlrun-1.7.0rc43.dist-info → mlrun-1.7.0rc55.dist-info}/LICENSE +0 -0
  67. {mlrun-1.7.0rc43.dist-info → mlrun-1.7.0rc55.dist-info}/entry_points.txt +0 -0
  68. {mlrun-1.7.0rc43.dist-info → mlrun-1.7.0rc55.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -67,13 +67,7 @@ from ..features import Feature
 from ..model import EntrypointParam, ImageBuilder, ModelObj
 from ..run import code_to_function, get_object, import_function, new_function
 from ..secrets import SecretsStore
-from ..utils import (
-    is_ipython,
-    is_relative_path,
-    is_yaml_path,
-    logger,
-    update_in,
-)
+from ..utils import is_jupyter, is_relative_path, is_yaml_path, logger, update_in
 from ..utils.clones import (
     add_credentials_git_remote_url,
     clone_git,
@@ -714,7 +708,7 @@ def _load_project_from_db(url, secrets, user_project=False):
 
 def _delete_project_from_db(project_name, secrets, deletion_strategy):
     db = mlrun.db.get_run_db(secrets=secrets)
-    return db.delete_project(project_name, deletion_strategy=deletion_strategy)
+    db.delete_project(project_name, deletion_strategy=deletion_strategy)
 
 
 def _load_project_file(url, name="", secrets=None, allow_cross_project=None):
@@ -1558,7 +1552,7 @@ class MlrunProject(ModelObj):
             url = path.normpath(path.join(self.spec.get_code_path(), url))
 
         if (not in_context or check_path_in_context) and not path.isfile(url):
-            raise mlrun.errors.MLRunNotFoundError(f"{url} not found")
+            raise FileNotFoundError(f"{url} not found")
 
         return url, in_context
 
@@ -1599,7 +1593,9 @@ class MlrunProject(ModelObj):
        :param format: artifact file format: csv, png, ..
        :param tag: version tag
        :param target_path: absolute target path (instead of using artifact_path + local_path)
-       :param upload: upload to datastore (default is True)
+       :param upload: Whether to upload the artifact to the datastore. If not provided, and the `local_path`
+                      is not a directory, upload occurs by default. Directories are uploaded only when this
+                      flag is explicitly set to `True`.
        :param labels: a set of key/value labels to tag the artifact with
 
        :returns: artifact object
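
The new `upload` semantics can be exercised as below — a minimal sketch, with hypothetical project name and paths:

    import mlrun

    project = mlrun.get_or_create_project("demo", context="./")

    # a single file is uploaded by default when `upload` is not passed
    project.log_artifact("report", local_path="report.csv")

    # a directory is uploaded only when `upload=True` is set explicitly
    project.log_artifact("training-data", local_path="./data/", upload=True)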
@@ -1954,7 +1950,6 @@ class MlrunProject(ModelObj):
         application_class: typing.Union[
             str,
             mm_app.ModelMonitoringApplicationBase,
-            mm_app.ModelMonitoringApplicationBaseV2,
         ] = None,
         name: str = None,
         image: str = None,
@@ -2022,7 +2017,6 @@ class MlrunProject(ModelObj):
         application_class: typing.Union[
             str,
             mm_app.ModelMonitoringApplicationBase,
-            mm_app.ModelMonitoringApplicationBaseV2,
         ] = None,
         name: str = None,
         image: str = None,
@@ -2080,7 +2074,6 @@ class MlrunProject(ModelObj):
         application_class: typing.Union[
             str,
             mm_app.ModelMonitoringApplicationBase,
-            mm_app.ModelMonitoringApplicationBaseV2,
             None,
         ] = None,
         name: typing.Optional[str] = None,
@@ -2169,7 +2162,8 @@ class MlrunProject(ModelObj):
 
        :param default_controller_image: Deprecated.
        :param base_period:              The time period in minutes in which the model monitoring controller
-                                        function is triggered. By default, the base period is 10 minutes.
+                                        function is triggered. By default, the base period is 10 minutes
+                                        (which is also the minimum value for production environments).
        :param image:                    The image of the model monitoring controller, writer, monitoring
                                         stream & histogram data drift functions, which are real time nuclio
                                         functions. By default, the image is mlrun/mlrun.
@@ -2188,6 +2182,12 @@ class MlrunProject(ModelObj):
                 FutureWarning,
             )
             image = default_controller_image
+        if base_period < 10:
+            logger.warn(
+                "enable_model_monitoring: 'base_period' < 10 minutes is not supported in production environments",
+                project=self.name,
+            )
+
         db = mlrun.db.get_run_db(secrets=self._secrets)
         db.enable_model_monitoring(
             project=self.name,
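
On the client side a too-small `base_period` now only logs a warning; a minimal usage sketch, assuming an existing project object:

    # 10 minutes is the default and the documented minimum for production;
    # smaller values emit the client-side warning shown above
    project.enable_model_monitoring(base_period=10, image="mlrun/mlrun")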
@@ -2439,7 +2439,7 @@ class MlrunProject(ModelObj):
         ):
             # if function path is not provided and it is not a module (no ".")
             # use the current notebook as default
-            if is_ipython:
+            if is_jupyter:
                 from IPython import get_ipython
 
                 kernel = get_ipython()
@@ -2842,11 +2842,13 @@ class MlrunProject(ModelObj):
        The function objects are synced against the definitions spec in `self.spec._function_definitions`.
        Referenced files/URLs in the function spec will be reloaded.
        Function definitions are parsed by the following precedence:
-       1. Contains runtime spec.
-       2. Contains module in the project's context.
-       3. Contains path to function definition (yaml, DB, Hub).
-       4. Contains path to .ipynb or .py files.
-       5. Contains a Nuclio/Serving function image / an 'Application' kind definition.
+
+       1. Contains runtime spec.
+       2. Contains module in the project's context.
+       3. Contains path to function definition (yaml, DB, Hub).
+       4. Contains path to .ipynb or .py files.
+       5. Contains a Nuclio/Serving function image / an 'Application' kind definition.
+
        If function definition is already an object, some project metadata updates will apply however,
        it will not be reloaded.
 
@@ -2902,6 +2904,16 @@ class MlrunProject(ModelObj):
                         continue
 
                     raise mlrun.errors.MLRunMissingDependencyError(message) from exc
+
+                except Exception as exc:
+                    if silent:
+                        logger.warn(
+                            "Failed to instantiate function",
+                            name=name,
+                            error=mlrun.utils.err_to_str(exc),
+                        )
+                        continue
+                    raise exc
             else:
                 message = f"Function {name} must be an object or dict."
                 if silent:
@@ -3060,6 +3072,7 @@ class MlrunProject(ModelObj):
         source: str = None,
         cleanup_ttl: int = None,
         notifications: list[mlrun.model.Notification] = None,
+        workflow_runner_node_selector: typing.Optional[dict[str, str]] = None,
     ) -> _PipelineRunStatus:
         """Run a workflow using kubeflow pipelines
 
@@ -3088,15 +3101,20 @@ class MlrunProject(ModelObj):
 
            * Remote URL which is loaded dynamically to the workflow runner.
            * A path to the project's context on the workflow runner's image.
-           Path can be absolute or relative to `project.spec.build.source_code_target_dir` if defined
-           (enriched when building a project image with source, see `MlrunProject.build_image`).
-           For other engines the source is used to validate that the code is up-to-date.
+             Path can be absolute or relative to `project.spec.build.source_code_target_dir` if defined
+             (enriched when building a project image with source, see `MlrunProject.build_image`).
+             For other engines the source is used to validate that the code is up-to-date.
+
        :param cleanup_ttl:
                    Pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
                    workflow and all its resources are deleted)
        :param notifications:
                    List of notifications to send for workflow completion
-
+       :param workflow_runner_node_selector:
+                   Defines the node selector for the workflow runner pod when using a remote engine.
+                   This allows you to control and specify where the workflow runner pod will be scheduled.
+                   This setting is only relevant when the engine is set to 'remote' or for scheduled workflows,
+                   and it will be ignored if the workflow is not run on a remote engine.
        :returns: ~py:class:`~mlrun.projects.pipelines._PipelineRunStatus` instance
        """
 
@@ -3162,6 +3180,16 @@ class MlrunProject(ModelObj):
             )
             inner_engine = get_workflow_engine(engine_kind, local).engine
             workflow_spec.engine = inner_engine or workflow_engine.engine
+        if workflow_runner_node_selector:
+            if workflow_engine.engine == "remote":
+                workflow_spec.workflow_runner_node_selector = (
+                    workflow_runner_node_selector
+                )
+            else:
+                logger.warn(
+                    "'workflow_runner_node_selector' applies only to remote engines"
+                    " and is ignored for non-remote runs."
+                )
 
         run = workflow_engine.run(
             self,
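
A usage sketch for the new parameter (workflow name and node labels are hypothetical; as the code above shows, it only takes effect with the remote engine):

    project.run(
        name="main",
        engine="remote",
        # schedule the workflow runner pod onto a labeled node pool
        workflow_runner_node_selector={"node-pool": "cpu-workers"},
    )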
mlrun/render.py CHANGED
@@ -22,7 +22,7 @@ import mlrun.utils
 
 from .config import config
 from .datastore import uri_to_ipython
-from .utils import dict_to_list, get_in, is_ipython
+from .utils import dict_to_list, get_in, is_jupyter
 
 JUPYTER_SERVER_ROOT = environ.get("HOME", "/User")
 supported_viewers = [
@@ -181,8 +181,8 @@ def run_to_html(results, display=True):
 
 
 def ipython_display(html, display=True, alt_text=None):
-    if display and html and is_ipython:
-        import IPython
+    if display and html and is_jupyter:
+        import IPython.display
 
        IPython.display.display(IPython.display.HTML(html))
    elif alt_text:
mlrun/runtimes/daskjob.py CHANGED
@@ -379,7 +379,7 @@ class DaskCluster(KubejobRuntime):
        :param show_on_failure: show logs only in case of build failure
        :param force_build: force building the image, even when no changes were made
 
-       :return True if the function is ready (deployed)
+       :return: True if the function is ready (deployed)
        """
        return super().deploy(
            watch,
mlrun/runtimes/kubejob.py CHANGED
@@ -11,7 +11,7 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
+import typing
 import warnings
 
 from mlrun_pipelines.common.ops import build_op
@@ -143,11 +143,11 @@ class KubejobRuntime(KubeResource):
 
     def deploy(
         self,
-        watch=True,
-        with_mlrun=None,
-        skip_deployed=False,
-        is_kfp=False,
-        mlrun_version_specifier=None,
+        watch: bool = True,
+        with_mlrun: typing.Optional[bool] = None,
+        skip_deployed: bool = False,
+        is_kfp: bool = False,
+        mlrun_version_specifier: typing.Optional[bool] = None,
         builder_env: dict = None,
         show_on_failure: bool = False,
         force_build: bool = False,
mlrun/runtimes/nuclio/api_gateway.py CHANGED
@@ -22,6 +22,7 @@ from nuclio.auth import AuthKinds as NuclioAuthKinds
 
 import mlrun
 import mlrun.common.constants as mlrun_constants
+import mlrun.common.helpers
 import mlrun.common.schemas as schemas
 import mlrun.common.types
 from mlrun.model import ModelObj
@@ -202,8 +203,13 @@ class APIGatewaySpec(ModelObj):
         self.project = project
         self.ports = ports
 
+        self.enrich()
         self.validate(project=project, functions=functions, canary=canary, ports=ports)
 
+    def enrich(self):
+        if self.path and not self.path.startswith("/"):
+            self.path = f"/{self.path}"
+
     def validate(
         self,
         project: str,
@@ -587,6 +593,12 @@ class APIGateway(ModelObj):
             self.metadata.annotations, gateway_timeout
         )
 
+    def with_annotations(self, annotations: dict):
+        """set a key/value annotations in the metadata of the api gateway"""
+        for key, value in annotations.items():
+            self.metadata.annotations[key] = str(value)
+        return self
+
     @classmethod
     def from_scheme(cls, api_gateway: schemas.APIGateway):
         project = api_gateway.metadata.labels.get(
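
The two additions normalize the gateway path (a leading "/" is prepended when missing) and expose annotation editing. A minimal sketch, assuming `gw` is an existing APIGateway object (the annotation keys and values are hypothetical):

    gw.with_annotations({"nginx.ingress.kubernetes.io/proxy-body-size": "100m"})
    # values are stringified, so non-string values are also accepted
    gw.with_annotations({"example.com/max-retries": 3})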
mlrun/runtimes/nuclio/application/application.py CHANGED
@@ -438,9 +438,10 @@ class ApplicationRuntime(RemoteRuntime):
        """
        Create the application API gateway. Once the application is deployed, the API gateway can be created.
        An application without an API gateway is not accessible.
+
        :param name: The name of the API gateway
        :param path: Optional path of the API gateway, default value is "/".
-           The given path should be supported by the deployed application
+                    The given path should be supported by the deployed application
        :param direct_port_access: Set True to allow direct port access to the application sidecar
        :param authentication_mode: API Gateway authentication mode
        :param authentication_creds: API Gateway basic authentication credentials as a tuple (username, password)
@@ -449,8 +450,7 @@ class ApplicationRuntime(RemoteRuntime):
        :param set_as_default: Set the API gateway as the default for the application (`status.api_gateway`)
        :param gateway_timeout: nginx ingress timeout in sec (request timeout, when will the gateway return an
                                error)
-
-       :return: The API gateway URL
+       :return: The API gateway URL
        """
        if not name:
            raise mlrun.errors.MLRunInvalidArgumentError(
mlrun/runtimes/nuclio/function.py CHANGED
@@ -23,6 +23,7 @@ import inflection
 import nuclio
 import nuclio.utils
 import requests
+import semver
 from aiohttp.client import ClientSession
 from kubernetes import client
 from mlrun_pipelines.common.mounts import VolumeMount
@@ -296,10 +297,37 @@ class RemoteRuntime(KubeResource):
         """
         if hasattr(spec, "to_dict"):
             spec = spec.to_dict()
+
+        self._validate_triggers(spec)
+
         spec["name"] = name
         self.spec.config[f"spec.triggers.{name}"] = spec
         return self
 
+    def _validate_triggers(self, spec):
+        # ML-7763 / NUC-233
+        min_nuclio_version = "1.13.12"
+        if mlconf.nuclio_version and semver.VersionInfo.parse(
+            mlconf.nuclio_version
+        ) < semver.VersionInfo.parse(min_nuclio_version):
+            explicit_ack_enabled = False
+            num_triggers = 0
+            trigger_name = spec.get("name", "UNKNOWN")
+            for key, config in [(f"spec.triggers.{trigger_name}", spec)] + list(
+                self.spec.config.items()
+            ):
+                if key.startswith("spec.triggers."):
+                    num_triggers += 1
+                    explicit_ack_enabled = (
+                        config.get("explicitAckMode", "disable") != "disable"
+                    )
+
+            if num_triggers > 1 and explicit_ack_enabled:
+                raise mlrun.errors.MLRunInvalidArgumentError(
+                    "Multiple triggers cannot be used in conjunction with explicit ack. "
+                    f"Please upgrade to nuclio {min_nuclio_version} or newer."
+                )
+
     def with_source_archive(
         self,
         source,
@@ -495,6 +523,11 @@
         extra_attributes = extra_attributes or {}
         if ack_window_size:
             extra_attributes["ackWindowSize"] = ack_window_size
+
+        access_key = kwargs.pop("access_key", None)
+        if not access_key:
+            access_key = self._resolve_v3io_access_key()
+
         self.add_trigger(
             name,
             V3IOStreamTrigger(
@@ -506,6 +539,7 @@
                 webapi=endpoint or "http://v3io-webapi:8081",
                 extra_attributes=extra_attributes,
                 read_batch_size=256,
+                access_key=access_key,
                 **kwargs,
             ),
         )
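
A usage sketch, assuming the surrounding method is RemoteRuntime.add_v3io_stream_trigger (its signature is not shown in this hunk) and a hypothetical stream path:

    fn.add_v3io_stream_trigger(
        stream_path="projects/demo/my-stream",
        name="stream",
        # optional as of this change: when omitted, mlrun auto-resolves a key
        # on nuclio >= 1.13.11 (see _resolve_v3io_access_key below)
        access_key="my-v3io-access-key",
    )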
@@ -1241,6 +1275,13 @@
 
         return self._resolve_invocation_url("", force_external_address)
 
+    @staticmethod
+    def _resolve_v3io_access_key():
+        # Nuclio supports generating access key for v3io stream trigger only from version 1.13.11
+        if validate_nuclio_version_compatibility("1.13.11"):
+            return mlrun.model.Credentials.generate_access_key
+        return None
+
 
 def parse_logs(logs):
     logs = json.loads(logs)
mlrun/runtimes/nuclio/serving.py CHANGED
@@ -314,8 +314,8 @@ class ServingRuntime(RemoteRuntime):
         tracking_policy: Optional[Union["TrackingPolicy", dict]] = None,
         enable_tracking: bool = True,
     ) -> None:
-        """apply on your serving function to monitor a deployed model, including real-time dashboards to detect drift
-        and analyze performance.
+        """Apply on your serving function to monitor a deployed model, including real-time dashboards to detect drift
+        and analyze performance.
 
        :param stream_path: Path/url of the tracking stream e.g. v3io:///users/mike/mystream
                            you can use the "dummy://" path for test/simulation.
mlrun/runtimes/pod.py CHANGED
@@ -1107,12 +1107,12 @@ class KubeResource(BaseRuntime, KfpAdapterMixin):
 
        :param state_thresholds: A dictionary of state to threshold. The supported states are:
 
-           * pending_scheduled - The pod/crd is scheduled on a node but not yet running
-           * pending_not_scheduled - The pod/crd is not yet scheduled on a node
-           * executing - The pod/crd started and is running
-           * image_pull_backoff - The pod/crd is in image pull backoff
-           See mlrun.mlconf.function.spec.state_thresholds for the default thresholds.
+           * pending_scheduled - The pod/crd is scheduled on a node but not yet running
+           * pending_not_scheduled - The pod/crd is not yet scheduled on a node
+           * executing - The pod/crd started and is running
+           * image_pull_backoff - The pod/crd is in image pull backoff
 
+           See :code:`mlrun.mlconf.function.spec.state_thresholds` for the default thresholds.
        :param patch: Whether to merge the given thresholds with the existing thresholds (True, default)
                      or override them (False)
        """
@@ -1347,20 +1347,26 @@ class KubeResource(BaseRuntime, KfpAdapterMixin):
 
     def _build_image(
         self,
-        builder_env,
-        force_build,
-        mlrun_version_specifier,
-        show_on_failure,
-        skip_deployed,
-        watch,
-        is_kfp,
-        with_mlrun,
+        builder_env: dict,
+        force_build: bool,
+        mlrun_version_specifier: typing.Optional[bool],
+        show_on_failure: bool,
+        skip_deployed: bool,
+        watch: bool,
+        is_kfp: bool,
+        with_mlrun: typing.Optional[bool],
     ):
         # When we're in pipelines context we must watch otherwise the pipelines pod will exit before the operation
         # is actually done. (when a pipelines pod exits, the pipeline step marked as done)
         if is_kfp:
             watch = True
 
+        if skip_deployed and self.requires_build() and not self.is_deployed():
+            logger.warning(
+                f"Even though {skip_deployed=}, the build might be triggered due to the function's configuration. "
+                "See requires_build() and is_deployed() for reasoning."
+            )
+
         db = self._get_db()
         data = db.remote_builder(
             self,
mlrun/serving/server.py CHANGED
@@ -401,6 +401,8 @@ def v2_serving_handler(context, event, get_body=False):
         "kafka-cluster",
         "v3ioStream",
         "v3io-stream",
+        "rabbit-mq",
+        "rabbitMq",
     ):
         event.path = "/"
 
mlrun/utils/helpers.py CHANGED
@@ -41,7 +41,7 @@ import semver
 import yaml
 from dateutil import parser
 from mlrun_pipelines.models import PipelineRun
-from pandas._libs.tslibs.timestamps import Timedelta, Timestamp
+from pandas import Timedelta, Timestamp
 from yaml.representer import RepresenterError
 
 import mlrun
@@ -111,9 +111,12 @@ def get_artifact_target(item: dict, project=None):
     project_str = project or item["metadata"].get("project")
     tree = item["metadata"].get("tree")
     tag = item["metadata"].get("tag")
+    kind = item.get("kind")
 
-    if item.get("kind") in {"dataset", "model", "artifact"} and db_key:
-        target = f"{DB_SCHEMA}://{StorePrefix.Artifact}/{project_str}/{db_key}"
+    if kind in {"dataset", "model", "artifact"} and db_key:
+        target = (
+            f"{DB_SCHEMA}://{StorePrefix.kind_to_prefix(kind)}/{project_str}/{db_key}"
+        )
         target += f":{tag}" if tag else ":latest"
         if tree:
             target += f"@{tree}"
@@ -133,18 +136,25 @@ def is_legacy_artifact(artifact):
 logger = create_logger(config.log_level, config.log_formatter, "mlrun", sys.stdout)
 missing = object()
 
-is_ipython = False
+is_ipython = False  # is IPython terminal, including Jupyter
+is_jupyter = False  # is Jupyter notebook/lab terminal
 try:
-    import IPython
+    import IPython.core.getipython
+
+    ipy = IPython.core.getipython.get_ipython()
+
+    is_ipython = ipy is not None
+    is_jupyter = (
+        is_ipython
+        # not IPython
+        and "Terminal" not in str(type(ipy))
+    )
 
-    ipy = IPython.get_ipython()
-    # if its IPython terminal ignore (cant show html)
-    if ipy and "Terminal" not in str(type(ipy)):
-        is_ipython = True
-except ImportError:
+    del ipy
+except ModuleNotFoundError:
     pass
 
-if is_ipython and config.nest_asyncio_enabled in ["1", "True"]:
+if is_jupyter and config.nest_asyncio_enabled in ["1", "True"]:
     # bypass Jupyter asyncio bug
     import nest_asyncio
 
@@ -1421,11 +1431,7 @@ def is_running_in_jupyter_notebook() -> bool:
     Check if the code is running inside a Jupyter Notebook.
     :return: True if running inside a Jupyter Notebook, False otherwise.
     """
-    import IPython
-
-    ipy = IPython.get_ipython()
-    # if its IPython terminal, it isn't a Jupyter ipython
-    return ipy and "Terminal" not in str(type(ipy))
+    return is_jupyter
 
 
 def create_ipython_display():
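
The split between the two flags can be reproduced standalone — a minimal sketch of the same detection pattern:

    # in_ipython: any IPython shell (terminal or notebook kernel)
    # in_jupyter: notebook/lab kernel only (terminal IPython excluded)
    try:
        from IPython.core.getipython import get_ipython

        ipy = get_ipython()
        in_ipython = ipy is not None
        in_jupyter = in_ipython and "Terminal" not in str(type(ipy))
    except ModuleNotFoundError:
        in_ipython = in_jupyter = False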
@@ -1776,3 +1782,43 @@ def _reload(module, max_recursion_depth):
         attribute = getattr(module, attribute_name)
         if type(attribute) is ModuleType:
             _reload(attribute, max_recursion_depth - 1)
+
+
+def run_with_retry(
+    retry_count: int,
+    func: typing.Callable,
+    retry_on_exceptions: typing.Union[
+        type[Exception],
+        tuple[type[Exception]],
+    ] = None,
+    *args,
+    **kwargs,
+):
+    """
+    Executes a function with retry logic upon encountering specified exceptions.
+
+    :param retry_count: The number of times to retry the function execution.
+    :param func: The function to execute.
+    :param retry_on_exceptions: Exception(s) that trigger a retry. Can be a single exception or a tuple of exceptions.
+    :param args: Positional arguments to pass to the function.
+    :param kwargs: Keyword arguments to pass to the function.
+    :return: The result of the function execution if successful.
+    :raises Exception: Re-raises the last exception encountered after all retries are exhausted.
+    """
+    if retry_on_exceptions is None:
+        retry_on_exceptions = (Exception,)
+    elif isinstance(retry_on_exceptions, list):
+        retry_on_exceptions = tuple(retry_on_exceptions)
+
+    last_exception = None
+    for attempt in range(retry_count + 1):
+        try:
+            return func(*args, **kwargs)
+        except retry_on_exceptions as exc:
+            last_exception = exc
+            logger.warning(
+                f"Attempt {{{attempt}/ {retry_count}}} failed with exception: {exc}",
+            )
+            if attempt == retry_count:
+                raise
+    raise last_exception
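
A usage sketch for the new helper; the callable is hypothetical, while the import path follows the file the helper is added to:

    from mlrun.utils.helpers import run_with_retry

    def fetch_remote_status():
        ...  # hypothetical flaky call

    # retry up to 3 additional times, only on ConnectionError
    result = run_with_retry(
        retry_count=3,
        func=fetch_remote_status,
        retry_on_exceptions=ConnectionError,
    )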
mlrun/utils/version/version.json CHANGED
@@ -1,4 +1,4 @@
 {
-  "git_commit": "13896619ed9e935aab2e08c075dc12ae6f3e449f",
-  "version": "1.7.0-rc43"
+  "git_commit": "e1c65a391e9fcd99886541d4223d8603402180ce",
+  "version": "1.7.0-rc55"
 }