mlrun 1.10.0rc40__py3-none-any.whl → 1.11.0rc16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; see the registry advisory for more details.

Files changed (150)
  1. mlrun/__init__.py +3 -2
  2. mlrun/__main__.py +0 -4
  3. mlrun/artifacts/dataset.py +2 -2
  4. mlrun/artifacts/plots.py +1 -1
  5. mlrun/{model_monitoring/db/tsdb/tdengine → auth}/__init__.py +2 -3
  6. mlrun/auth/nuclio.py +89 -0
  7. mlrun/auth/providers.py +429 -0
  8. mlrun/auth/utils.py +415 -0
  9. mlrun/common/constants.py +7 -0
  10. mlrun/common/model_monitoring/helpers.py +41 -4
  11. mlrun/common/runtimes/constants.py +28 -0
  12. mlrun/common/schemas/__init__.py +13 -3
  13. mlrun/common/schemas/alert.py +2 -2
  14. mlrun/common/schemas/api_gateway.py +3 -0
  15. mlrun/common/schemas/auth.py +10 -10
  16. mlrun/common/schemas/client_spec.py +4 -0
  17. mlrun/common/schemas/constants.py +25 -0
  18. mlrun/common/schemas/frontend_spec.py +1 -8
  19. mlrun/common/schemas/function.py +24 -0
  20. mlrun/common/schemas/hub.py +3 -2
  21. mlrun/common/schemas/model_monitoring/__init__.py +1 -1
  22. mlrun/common/schemas/model_monitoring/constants.py +2 -2
  23. mlrun/common/schemas/secret.py +17 -2
  24. mlrun/common/secrets.py +95 -1
  25. mlrun/common/types.py +10 -10
  26. mlrun/config.py +53 -15
  27. mlrun/data_types/infer.py +2 -2
  28. mlrun/datastore/__init__.py +2 -3
  29. mlrun/datastore/base.py +274 -10
  30. mlrun/datastore/datastore.py +1 -1
  31. mlrun/datastore/datastore_profile.py +49 -17
  32. mlrun/datastore/model_provider/huggingface_provider.py +6 -2
  33. mlrun/datastore/model_provider/model_provider.py +2 -2
  34. mlrun/datastore/model_provider/openai_provider.py +2 -2
  35. mlrun/datastore/s3.py +15 -16
  36. mlrun/datastore/sources.py +1 -1
  37. mlrun/datastore/store_resources.py +4 -4
  38. mlrun/datastore/storeytargets.py +16 -10
  39. mlrun/datastore/targets.py +1 -1
  40. mlrun/datastore/utils.py +16 -3
  41. mlrun/datastore/v3io.py +1 -1
  42. mlrun/db/base.py +36 -12
  43. mlrun/db/httpdb.py +316 -101
  44. mlrun/db/nopdb.py +29 -11
  45. mlrun/errors.py +4 -2
  46. mlrun/execution.py +11 -12
  47. mlrun/feature_store/api.py +1 -1
  48. mlrun/feature_store/common.py +1 -1
  49. mlrun/feature_store/feature_vector_utils.py +1 -1
  50. mlrun/feature_store/steps.py +8 -6
  51. mlrun/frameworks/_common/utils.py +3 -3
  52. mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
  53. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -1
  54. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
  55. mlrun/frameworks/_ml_common/utils.py +2 -1
  56. mlrun/frameworks/auto_mlrun/auto_mlrun.py +4 -3
  57. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +2 -1
  58. mlrun/frameworks/onnx/dataset.py +2 -1
  59. mlrun/frameworks/onnx/mlrun_interface.py +2 -1
  60. mlrun/frameworks/pytorch/callbacks/logging_callback.py +5 -4
  61. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +2 -1
  62. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +2 -1
  63. mlrun/frameworks/pytorch/utils.py +2 -1
  64. mlrun/frameworks/sklearn/metric.py +2 -1
  65. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +5 -4
  66. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +2 -1
  67. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +2 -1
  68. mlrun/hub/__init__.py +37 -0
  69. mlrun/hub/base.py +142 -0
  70. mlrun/hub/module.py +67 -76
  71. mlrun/hub/step.py +113 -0
  72. mlrun/launcher/base.py +2 -1
  73. mlrun/launcher/local.py +2 -1
  74. mlrun/model.py +12 -2
  75. mlrun/model_monitoring/__init__.py +0 -1
  76. mlrun/model_monitoring/api.py +2 -2
  77. mlrun/model_monitoring/applications/base.py +20 -6
  78. mlrun/model_monitoring/applications/context.py +1 -0
  79. mlrun/model_monitoring/controller.py +7 -17
  80. mlrun/model_monitoring/db/_schedules.py +2 -16
  81. mlrun/model_monitoring/db/_stats.py +2 -13
  82. mlrun/model_monitoring/db/tsdb/__init__.py +9 -7
  83. mlrun/model_monitoring/db/tsdb/base.py +2 -4
  84. mlrun/model_monitoring/db/tsdb/preaggregate.py +234 -0
  85. mlrun/model_monitoring/db/tsdb/stream_graph_steps.py +63 -0
  86. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_metrics_queries.py +414 -0
  87. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_predictions_queries.py +376 -0
  88. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_results_queries.py +590 -0
  89. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connection.py +434 -0
  90. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connector.py +541 -0
  91. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_operations.py +808 -0
  92. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_schema.py +502 -0
  93. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream.py +163 -0
  94. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream_graph_steps.py +60 -0
  95. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_dataframe_processor.py +141 -0
  96. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_query_builder.py +585 -0
  97. mlrun/model_monitoring/db/tsdb/timescaledb/writer_graph_steps.py +73 -0
  98. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +4 -6
  99. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +147 -79
  100. mlrun/model_monitoring/features_drift_table.py +2 -1
  101. mlrun/model_monitoring/helpers.py +2 -1
  102. mlrun/model_monitoring/stream_processing.py +18 -16
  103. mlrun/model_monitoring/writer.py +4 -3
  104. mlrun/package/__init__.py +2 -1
  105. mlrun/platforms/__init__.py +0 -44
  106. mlrun/platforms/iguazio.py +1 -1
  107. mlrun/projects/operations.py +11 -10
  108. mlrun/projects/project.py +81 -82
  109. mlrun/run.py +4 -7
  110. mlrun/runtimes/__init__.py +2 -204
  111. mlrun/runtimes/base.py +89 -21
  112. mlrun/runtimes/constants.py +225 -0
  113. mlrun/runtimes/daskjob.py +4 -2
  114. mlrun/runtimes/databricks_job/databricks_runtime.py +2 -1
  115. mlrun/runtimes/mounts.py +5 -0
  116. mlrun/runtimes/nuclio/__init__.py +12 -8
  117. mlrun/runtimes/nuclio/api_gateway.py +36 -6
  118. mlrun/runtimes/nuclio/application/application.py +200 -32
  119. mlrun/runtimes/nuclio/function.py +154 -49
  120. mlrun/runtimes/nuclio/serving.py +55 -42
  121. mlrun/runtimes/pod.py +59 -10
  122. mlrun/secrets.py +46 -2
  123. mlrun/serving/__init__.py +2 -0
  124. mlrun/serving/remote.py +5 -5
  125. mlrun/serving/routers.py +3 -3
  126. mlrun/serving/server.py +46 -43
  127. mlrun/serving/serving_wrapper.py +6 -2
  128. mlrun/serving/states.py +554 -207
  129. mlrun/serving/steps.py +1 -1
  130. mlrun/serving/system_steps.py +42 -33
  131. mlrun/track/trackers/mlflow_tracker.py +29 -31
  132. mlrun/utils/helpers.py +89 -16
  133. mlrun/utils/http.py +9 -2
  134. mlrun/utils/notifications/notification/git.py +1 -1
  135. mlrun/utils/notifications/notification/mail.py +39 -16
  136. mlrun/utils/notifications/notification_pusher.py +2 -2
  137. mlrun/utils/version/version.json +2 -2
  138. mlrun/utils/version/version.py +3 -4
  139. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/METADATA +39 -49
  140. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/RECORD +144 -130
  141. mlrun/db/auth_utils.py +0 -152
  142. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +0 -343
  143. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -75
  144. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +0 -281
  145. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +0 -1368
  146. mlrun/model_monitoring/db/tsdb/tdengine/writer_graph_steps.py +0 -51
  147. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/WHEEL +0 -0
  148. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/entry_points.txt +0 -0
  149. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/licenses/LICENSE +0 -0
  150. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/top_level.txt +0 -0
mlrun/serving/steps.py CHANGED
@@ -45,7 +45,7 @@ class ChoiceByField(storey.Choice):
45
45
  )
46
46
 
47
47
  # Case 3: Invalid type
48
- if not isinstance(outlet, (str, list, tuple)):
48
+ if not isinstance(outlet, str | list | tuple):
49
49
  raise mlrun.errors.MLRunInvalidArgumentTypeError(
50
50
  f"Field '{self.field_name}' must be a string or list of strings "
51
51
  f"but is instead of type '{type(outlet).__name__}'."
@@ -12,6 +12,7 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  import random
15
+ import typing
15
16
  from copy import copy
16
17
  from datetime import timedelta
17
18
  from typing import Any, Optional, Union
@@ -70,10 +71,14 @@ class MonitoringPreProcessor(storey.MapClass):
70
71
  output_schema=output_schema,
71
72
  input_schema=input_schema,
72
73
  )
73
-
74
- outputs, new_output_schema = self.get_listed_data(
75
- event.body.get(model, event.body), result_path, output_schema
76
- )
74
+ if event.body and isinstance(event.body, list):
75
+ outputs, new_output_schema = self.get_listed_data(
76
+ event.body, result_path, output_schema
77
+ )
78
+ else:
79
+ outputs, new_output_schema = self.get_listed_data(
80
+ event.body.get(model, event.body), result_path, output_schema
81
+ )
77
82
  inputs, new_input_schema = self.get_listed_data(
78
83
  event._metadata.get("inputs", {}), input_path, input_schema
79
84
  )
@@ -112,7 +117,7 @@ class MonitoringPreProcessor(storey.MapClass):
112
117
 
113
118
  def get_listed_data(
114
119
  self,
115
- raw_data: dict,
120
+ raw_data: typing.Union[dict, list],
116
121
  data_path: Optional[Union[list[str], str]] = None,
117
122
  schema: Optional[list[str]] = None,
118
123
  ):
@@ -193,8 +198,8 @@ class MonitoringPreProcessor(storey.MapClass):
193
198
  )
194
199
 
195
200
  # Detect if all are scalars ie: int,float,str
196
- all_scalars = all(not isinstance(v, (list, tuple, np.ndarray)) for v in values)
197
- all_lists = all(isinstance(v, (list, tuple, np.ndarray)) for v in values)
201
+ all_scalars = all(not isinstance(v, list | tuple | np.ndarray) for v in values)
202
+ all_lists = all(isinstance(v, list | tuple | np.ndarray) for v in values)
198
203
 
199
204
  if not (all_scalars or all_lists):
200
205
  raise ValueError(
@@ -242,6 +247,21 @@ class MonitoringPreProcessor(storey.MapClass):
242
247
  when = event._metadata.get(model, {}).get(
243
248
  mm_schemas.StreamProcessingEvent.WHEN
244
249
  )
250
+ # if the body is not a dict, use empty labels, error and metrics
251
+ if isinstance(event.body[model], dict):
252
+ body_by_model = event.body[model]
253
+ labels = body_by_model.get("labels") or {}
254
+ error = body_by_model.get(
255
+ mm_schemas.StreamProcessingEvent.ERROR
256
+ )
257
+ metrics = body_by_model.get(
258
+ mm_schemas.StreamProcessingEvent.METRICS
259
+ )
260
+ else:
261
+ labels = {}
262
+ error = None
263
+ metrics = None
264
+
245
265
  monitoring_event_list.append(
246
266
  {
247
267
  mm_schemas.StreamProcessingEvent.MODEL: model,
@@ -257,27 +277,14 @@ class MonitoringPreProcessor(storey.MapClass):
257
277
  ].get(
258
278
  mlrun.common.schemas.MonitoringData.MODEL_ENDPOINT_UID
259
279
  ),
260
- mm_schemas.StreamProcessingEvent.LABELS: event.body[
261
- model
262
- ].get("labels")
263
- or {},
280
+ mm_schemas.StreamProcessingEvent.LABELS: labels,
264
281
  mm_schemas.StreamProcessingEvent.FUNCTION_URI: self.server.function_uri
265
282
  if self.server
266
283
  else None,
267
284
  mm_schemas.StreamProcessingEvent.REQUEST: request,
268
285
  mm_schemas.StreamProcessingEvent.RESPONSE: resp,
269
- mm_schemas.StreamProcessingEvent.ERROR: event.body[model][
270
- mm_schemas.StreamProcessingEvent.ERROR
271
- ]
272
- if mm_schemas.StreamProcessingEvent.ERROR
273
- in event.body[model]
274
- else None,
275
- mm_schemas.StreamProcessingEvent.METRICS: event.body[model][
276
- mm_schemas.StreamProcessingEvent.METRICS
277
- ]
278
- if mm_schemas.StreamProcessingEvent.METRICS
279
- in event.body[model]
280
- else None,
286
+ mm_schemas.StreamProcessingEvent.ERROR: error,
287
+ mm_schemas.StreamProcessingEvent.METRICS: metrics,
281
288
  }
282
289
  )
283
290
  elif monitoring_data:
@@ -289,6 +296,15 @@ class MonitoringPreProcessor(storey.MapClass):
289
296
  when = event._original_timestamp
290
297
  else:
291
298
  when = event._metadata.get(mm_schemas.StreamProcessingEvent.WHEN)
299
+ # if the body is not a dict, use empty labels, error and metrics
300
+ if isinstance(event.body, dict):
301
+ labels = event.body.get("labels") or {}
302
+ error = event.body.get(mm_schemas.StreamProcessingEvent.ERROR)
303
+ metrics = event.body.get(mm_schemas.StreamProcessingEvent.METRICS)
304
+ else:
305
+ labels = {}
306
+ error = None
307
+ metrics = None
292
308
  monitoring_event_list.append(
293
309
  {
294
310
  mm_schemas.StreamProcessingEvent.MODEL: model,
@@ -302,21 +318,14 @@ class MonitoringPreProcessor(storey.MapClass):
302
318
  mm_schemas.StreamProcessingEvent.ENDPOINT_ID: monitoring_data[
303
319
  model
304
320
  ].get(mlrun.common.schemas.MonitoringData.MODEL_ENDPOINT_UID),
305
- mm_schemas.StreamProcessingEvent.LABELS: event.body.get("labels")
306
- or {},
321
+ mm_schemas.StreamProcessingEvent.LABELS: labels,
307
322
  mm_schemas.StreamProcessingEvent.FUNCTION_URI: self.server.function_uri
308
323
  if self.server
309
324
  else None,
310
325
  mm_schemas.StreamProcessingEvent.REQUEST: request,
311
326
  mm_schemas.StreamProcessingEvent.RESPONSE: resp,
312
- mm_schemas.StreamProcessingEvent.ERROR: event.body.get(
313
- mm_schemas.StreamProcessingEvent.ERROR
314
- ),
315
- mm_schemas.StreamProcessingEvent.METRICS: event.body[
316
- mm_schemas.StreamProcessingEvent.METRICS
317
- ]
318
- if mm_schemas.StreamProcessingEvent.METRICS in event.body
319
- else None,
327
+ mm_schemas.StreamProcessingEvent.ERROR: error,
328
+ mm_schemas.StreamProcessingEvent.METRICS: metrics,
320
329
  }
321
330
  )
322
331
  event.body = monitoring_event_list
@@ -217,7 +217,7 @@ class MLFlowTracker(Tracker):
217
217
  handler=handler,
218
218
  run_name=run.info.run_name,
219
219
  project_name=project.name,
220
- uid=run.info.run_uuid,
220
+ uid=run.info.run_id,
221
221
  )
222
222
 
223
223
  # Create a context from the run object:
@@ -373,7 +373,7 @@ class MLFlowTracker(Tracker):
373
373
  # Import the MLFlow run's artifacts to MLRun (model are logged after the rest of artifacts
374
374
  # so the artifacts can be registered as extra data in the models):
375
375
  artifacts = {}
376
- model_paths = []
376
+ model_uris = []
377
377
  for artifact in client.list_artifacts(run_id=run.info.run_id):
378
378
  # Get the artifact's local path (MLFlow suggests that if the artifact is already in the local filesystem
379
379
  # its local path will be returned:
@@ -381,29 +381,29 @@ class MLFlowTracker(Tracker):
381
381
  run_id=run.info.run_id,
382
382
  artifact_path=artifact.path,
383
383
  )
384
- # Check if the artifact is a model (will be logged after the artifacts):
385
- if artifact.is_dir and os.path.exists(
386
- os.path.join(
387
- artifact_local_path, "MLmodel"
388
- ) # Add tag to show model dir
389
- ):
390
- model_paths.append(artifact_local_path)
391
- else:
392
- # Log the artifact:
393
- artifact = MLFlowTracker._log_artifact(
394
- context=context,
395
- key=pathlib.Path(artifact.path).name.replace(".", "_"),
396
- # Mlflow has the same name for files but with different extensions, so we add extension to name
397
- local_path=artifact_local_path,
398
- tmp_path=tmp_dir,
399
- )
400
- artifacts[artifact.key] = artifact
384
+ # Log the artifact:
385
+ artifact = MLFlowTracker._log_artifact(
386
+ context=context,
387
+ key=pathlib.Path(artifact.path).name.replace(".", "_"),
388
+ # Mlflow has the same name for files but with different extensions, so we add extension to name
389
+ local_path=artifact_local_path,
390
+ tmp_path=tmp_dir,
391
+ )
392
+ artifacts[artifact.key] = artifact
393
+
394
+ # get all run model's uri's (artifact_location in mlflow 3.0.0).
395
+ logged_models = mlflow.search_logged_models(
396
+ filter_string=f"source_run_id = '{run.info.run_id}'",
397
+ output_format="list",
398
+ )
399
+ for logged_model in logged_models:
400
+ model_uris.append(logged_model.artifact_location)
401
401
 
402
- for model_path in model_paths:
402
+ for model_uri in model_uris:
403
403
  MLFlowTracker._log_model(
404
404
  context=context,
405
- model_uri=model_path,
406
- key=pathlib.Path(model_path).stem,
405
+ model_uri=model_uri,
406
+ key=pathlib.Path(model_uri).stem,
407
407
  metrics=results,
408
408
  extra_data=artifacts,
409
409
  tmp_path=tmp_dir,
@@ -439,20 +439,18 @@ class MLFlowTracker(Tracker):
439
439
 
440
440
  # Get the model info from MLFlow:
441
441
  model_info = mlflow.models.get_model_info(model_uri=model_uri)
442
+ # Download the model and set the path to local path:
443
+ local_model_path = mlflow.artifacts.download_artifacts(
444
+ artifact_uri=str(model_uri)
445
+ )
446
+ model_path = pathlib.Path(local_model_path)
442
447
 
443
448
  # Prepare the archive path:
444
- model_uri = pathlib.Path(model_uri)
445
- archive_path = pathlib.Path(tmp_path) / f"{model_uri.stem}.zip"
446
- if not os.path.exists(model_uri):
447
- local_path = mlflow.artifacts.download_artifacts(
448
- artifact_uri=str(model_uri)
449
- )
450
- model_uri = pathlib.Path(local_path)
451
-
449
+ archive_path = pathlib.Path(tmp_path) / f"{model_path.name}.zip"
452
450
  # TODO add progress bar for the case of large files
453
451
  # Zip the artifact:
454
452
  with zipfile.ZipFile(archive_path, "w") as zip_file:
455
- for path in model_uri.rglob("*"):
453
+ for path in model_path.rglob("*"):
456
454
  zip_file.write(filename=path, arcname=path.relative_to(model_uri))
457
455
 
458
456
  # Get inputs and outputs info:
mlrun/utils/helpers.py CHANGED
@@ -11,10 +11,10 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
-
15
14
  import asyncio
16
15
  import base64
17
16
  import enum
17
+ import functools
18
18
  import gzip
19
19
  import hashlib
20
20
  import inspect
@@ -30,7 +30,7 @@ import typing
30
30
  import uuid
31
31
  import warnings
32
32
  from copy import deepcopy
33
- from datetime import datetime, timedelta, timezone
33
+ from datetime import UTC, datetime, timedelta, timezone
34
34
  from importlib import import_module, reload
35
35
  from os import path
36
36
  from types import ModuleType
@@ -462,7 +462,7 @@ def get_pretty_types_names(types):
462
462
  return types[0].__name__
463
463
 
464
464
 
465
- def now_date(tz: timezone = timezone.utc) -> datetime:
465
+ def now_date(tz: timezone = UTC) -> datetime:
466
466
  return datetime.now(tz=tz)
467
467
 
468
468
 
@@ -477,7 +477,7 @@ def datetime_to_mysql_ts(datetime_object: datetime) -> datetime:
477
477
  :return: A MySQL-compatible timestamp string with millisecond precision.
478
478
  """
479
479
  if not datetime_object.tzinfo:
480
- datetime_object = datetime_object.replace(tzinfo=timezone.utc)
480
+ datetime_object = datetime_object.replace(tzinfo=UTC)
481
481
 
482
482
  # Round to the nearest millisecond
483
483
  ms = round(datetime_object.microsecond / 1000) * 1000
@@ -488,7 +488,7 @@ def datetime_to_mysql_ts(datetime_object: datetime) -> datetime:
488
488
  return datetime_object.replace(microsecond=ms)
489
489
 
490
490
 
491
- def datetime_min(tz: timezone = timezone.utc) -> datetime:
491
+ def datetime_min(tz: timezone = UTC) -> datetime:
492
492
  return datetime(1970, 1, 1, tzinfo=tz)
493
493
 
494
494
 
@@ -773,11 +773,11 @@ def dict_to_yaml(struct) -> str:
773
773
  # solve numpy json serialization
774
774
  class MyEncoder(json.JSONEncoder):
775
775
  def default(self, obj):
776
- if isinstance(obj, (int, str, float, list, dict)):
776
+ if isinstance(obj, int | str | float | list | dict):
777
777
  return obj
778
- elif isinstance(obj, (np.integer, np.int64)):
778
+ elif isinstance(obj, np.integer | np.int64):
779
779
  return int(obj)
780
- elif isinstance(obj, (np.floating, np.float64)):
780
+ elif isinstance(obj, np.floating | np.float64):
781
781
  return float(obj)
782
782
  elif isinstance(obj, np.ndarray):
783
783
  return obj.tolist()
@@ -1025,8 +1025,10 @@ def enrich_image_url(
1025
1025
  # use the tag from image URL if available, else fallback to the given tag
1026
1026
  tag = image_tag or tag
1027
1027
  if tag:
1028
+ # Remove '-pyXY' suffix if present, since the compatibility check expects a valid semver string
1029
+ tag_for_compatibility = re.sub(r"-py\d+$", "", tag)
1028
1030
  if mlrun.utils.helpers.validate_component_version_compatibility(
1029
- "mlrun-client", "1.10.0-rc0", mlrun_client_version=tag
1031
+ "mlrun-client", "1.10.0-rc0", mlrun_client_version=tag_for_compatibility
1030
1032
  ):
1031
1033
  warnings.warn(
1032
1034
  "'mlrun/ml-base' image is deprecated in 1.10.0 and will be removed in 1.12.0, "
@@ -1531,9 +1533,9 @@ def datetime_from_iso(time_str: str) -> Optional[datetime]:
1531
1533
  return
1532
1534
  dt = parser.isoparse(time_str)
1533
1535
  if dt.tzinfo is None:
1534
- dt = dt.replace(tzinfo=timezone.utc)
1536
+ dt = dt.replace(tzinfo=UTC)
1535
1537
  # ensure the datetime is in UTC, converting if necessary
1536
- return dt.astimezone(timezone.utc)
1538
+ return dt.astimezone(UTC)
1537
1539
 
1538
1540
 
1539
1541
  def datetime_to_iso(time_obj: Optional[datetime]) -> Optional[str]:
@@ -1547,7 +1549,7 @@ def enrich_datetime_with_tz_info(timestamp_string) -> Optional[datetime]:
1547
1549
  return timestamp_string
1548
1550
 
1549
1551
  if timestamp_string and not mlrun.utils.helpers.has_timezone(timestamp_string):
1550
- timestamp_string += datetime.now(timezone.utc).astimezone().strftime("%z")
1552
+ timestamp_string += datetime.now(UTC).astimezone().strftime("%z")
1551
1553
 
1552
1554
  for _format in [
1553
1555
  # e.g: 2021-08-25 12:00:00.000Z
@@ -1578,7 +1580,7 @@ def format_datetime(dt: datetime, fmt: Optional[str] = None) -> str:
1578
1580
 
1579
1581
  # If the datetime is naive
1580
1582
  if dt.tzinfo is None:
1581
- dt = dt.replace(tzinfo=timezone.utc)
1583
+ dt = dt.replace(tzinfo=UTC)
1582
1584
 
1583
1585
  # TODO: Once Python 3.12 is the minimal version, use %:z to format the timezone offset with a colon
1584
1586
  formatted_time = dt.strftime(fmt or "%Y-%m-%d %H:%M:%S.%f%z")
@@ -1740,7 +1742,7 @@ def format_run(run: PipelineRun, with_project=False) -> dict:
1740
1742
  for key, value in run.items():
1741
1743
  if (
1742
1744
  key in time_keys
1743
- and isinstance(value, (str, datetime))
1745
+ and isinstance(value, str | datetime)
1744
1746
  and parser.parse(str(value)).year == 1970
1745
1747
  ):
1746
1748
  run[key] = None
@@ -2147,7 +2149,7 @@ def validate_single_def_handler(function_kind: str, code: str):
2147
2149
  # it would override MLRun's wrapper
2148
2150
  if function_kind == "mlrun":
2149
2151
  # Find all lines that start with "def handler("
2150
- pattern = re.compile(r"^def handler\(", re.MULTILINE)
2152
+ pattern = re.compile(r"^(?:async\s+)?def handler\(", re.MULTILINE)
2151
2153
  matches = pattern.findall(code)
2152
2154
 
2153
2155
  # Only MLRun's wrapper handler (footer) can be in the code
@@ -2454,7 +2456,30 @@ def split_path(path: str) -> typing.Union[str, list[str], None]:
2454
2456
  return path
2455
2457
 
2456
2458
 
2457
- def get_data_from_path(path: typing.Union[str, list[str], None], data: dict) -> Any:
2459
+ def get_data_from_path(
2460
+ path: typing.Union[str, list[str], None], data: typing.Union[dict, list]
2461
+ ) -> Any:
2462
+ if data and isinstance(data, list):
2463
+ output_data = []
2464
+ for item in data:
2465
+ if isinstance(item, dict):
2466
+ output_data.append(get_data_from_dict(path, item))
2467
+ elif path is None:
2468
+ output_data = data
2469
+ else:
2470
+ raise mlrun.errors.MLRunInvalidArgumentError(
2471
+ "If data is a list of non-dict values, path must be None"
2472
+ )
2473
+ return output_data
2474
+ elif isinstance(data, dict):
2475
+ return get_data_from_dict(path, data)
2476
+ else:
2477
+ raise mlrun.errors.MLRunInvalidArgumentError(
2478
+ "Expected data be of type dict or list"
2479
+ )
2480
+
2481
+
2482
+ def get_data_from_dict(path: typing.Union[str, list[str], None], data: dict) -> Any:
2458
2483
  if isinstance(path, str):
2459
2484
  output_data = data.get(path)
2460
2485
  elif isinstance(path, list):
@@ -2552,3 +2577,51 @@ def get_relative_module_name_from_path(
2552
2577
  working_dir_path_object
2553
2578
  )
2554
2579
  return ".".join(relative_path_to_source_file.with_suffix("").parts)
2580
+
2581
+
2582
+ def iguazio_v4_only(function):
2583
+ @functools.wraps(function)
2584
+ def wrapper(*args, **kwargs):
2585
+ if not config.is_iguazio_v4_mode():
2586
+ raise mlrun.errors.MLRunRuntimeError(
2587
+ "This method is only supported in an Iguazio V4 system."
2588
+ )
2589
+ return function(*args, **kwargs)
2590
+
2591
+ return wrapper
2592
+
2593
+
2594
+ def raise_or_log_error(message: str, raise_on_error: bool = True):
2595
+ """
2596
+ Handle errors by either raising an exception or logging a warning.
2597
+
2598
+ :param message: The error message.
2599
+ :param raise_on_error: If True, raises an exception. Otherwise, logs a warning.
2600
+ """
2601
+ if raise_on_error:
2602
+ raise mlrun.errors.MLRunRuntimeError(message)
2603
+ logger.warning(message)
2604
+
2605
+
2606
+ def is_running_in_runtime() -> bool:
2607
+ """
2608
+ Check if the code is running inside an MLRun runtime environment.
2609
+ :return: True if running inside an MLRun runtime, False otherwise.
2610
+ """
2611
+ # Check for the presence of the MLRUN_RUNTIME_KIND environment variable
2612
+ return True if os.getenv("MLRUN_RUNTIME_KIND") else False
2613
+
2614
+
2615
+ def is_async_serving_graph(function_spec) -> bool:
2616
+ """Check if the serving graph contains any async nodes."""
2617
+ if not function_spec:
2618
+ return False
2619
+
2620
+ if (
2621
+ hasattr(function_spec, "graph")
2622
+ and hasattr(function_spec.graph, "engine")
2623
+ and function_spec.graph.engine == "async"
2624
+ ):
2625
+ return True
2626
+
2627
+ return False
mlrun/utils/http.py CHANGED
@@ -68,6 +68,7 @@ class HTTPSessionWithRetry(requests.Session):
68
68
  retry_on_exception=True,
69
69
  retry_on_status=True,
70
70
  retry_on_post=False,
71
+ retry_on_put=True,
71
72
  verbose=False,
72
73
  ):
73
74
  """
@@ -77,6 +78,8 @@ class HTTPSessionWithRetry(requests.Session):
77
78
  :param retry_on_exception: Retry on the HTTP_RETRYABLE_EXCEPTIONS. defaults to True.
78
79
  :param retry_on_status: Retry on error status codes. defaults to True.
79
80
  :param retry_on_post: Retry on POST requests. defaults to False.
81
+ :param retry_on_put: Whether to allow retries on PUT requests. Actual behavior may exclude specific
82
+ paths from retrying. defaults to True.
80
83
  :param verbose: Print debug messages.
81
84
  """
82
85
  super().__init__()
@@ -86,7 +89,7 @@ class HTTPSessionWithRetry(requests.Session):
86
89
  self.retry_on_exception = retry_on_exception
87
90
  self.verbose = verbose
88
91
  self._logger = logger.get_child("http-client")
89
- self._retry_methods = self._resolve_retry_methods(retry_on_post)
92
+ self._retry_methods = self._resolve_retry_methods(retry_on_post, retry_on_put)
90
93
 
91
94
  if retry_on_status:
92
95
  self._http_adapter = requests.adapters.HTTPAdapter(
@@ -200,9 +203,13 @@ class HTTPSessionWithRetry(requests.Session):
200
203
  def _method_retryable(self, method: str):
201
204
  return method in self._retry_methods
202
205
 
203
- def _resolve_retry_methods(self, retry_on_post: bool = False) -> frozenset[str]:
206
+ def _resolve_retry_methods(
207
+ self, retry_on_post: bool = False, retry_on_put: bool = True
208
+ ) -> frozenset[str]:
204
209
  methods = urllib3.util.retry.Retry.DEFAULT_ALLOWED_METHODS
205
210
  methods = methods.union({"PATCH"})
211
+ if not retry_on_put:
212
+ methods = methods.difference({"PUT"})
206
213
  if retry_on_post:
207
214
  methods = methods.union({"POST"})
208
215
  return frozenset(methods)
@@ -142,7 +142,7 @@ class GitNotification(NotificationBase):
142
142
  issue = event["number"]
143
143
  headers = {
144
144
  "Accept": "application/vnd.github.v3+json",
145
- "Authorization": f"token {token}",
145
+ mlrun.common.schemas.HeaderNames.authorization: f"token {token}",
146
146
  }
147
147
  url = f"https://{server}/repos/{repo}/issues/{issue}/comments"
148
148
 
@@ -34,17 +34,18 @@ class MailNotification(base.NotificationBase):
34
34
 
35
35
  boolean_params = ["use_tls", "start_tls", "validate_certs"]
36
36
 
37
+ optional_auth_params = ["username", "password"]
38
+
37
39
  required_params = [
38
40
  "server_host",
39
41
  "server_port",
40
42
  "sender_address",
41
- "username",
42
- "password",
43
43
  "email_addresses",
44
44
  ] + boolean_params
45
45
 
46
46
  @classmethod
47
47
  def validate_params(cls, params):
48
+ cls._enrich_params(params)
48
49
  for required_param in cls.required_params:
49
50
  if required_param not in params:
50
51
  raise ValueError(
@@ -57,6 +58,13 @@ class MailNotification(base.NotificationBase):
57
58
  f"Parameter '{boolean_param}' must be a boolean for MailNotification"
58
59
  )
59
60
 
61
+ # Allow no auth, username only, or username + password
62
+ # Some SMTP servers allow username without password
63
+ if params["password"] and not params["username"]:
64
+ raise ValueError(
65
+ "Parameter 'username' is required when 'password' is provided for MailNotification"
66
+ )
67
+
60
68
  cls._validate_emails(params)
61
69
 
62
70
  async def push(
@@ -78,6 +86,8 @@ class MailNotification(base.NotificationBase):
78
86
  )
79
87
  self.params["body"] = runs_html
80
88
 
89
+ self._enrich_params(self.params)
90
+
81
91
  if message_body_override:
82
92
  self.params["body"] = message_body_override.replace(
83
93
  "{{ runs }}", runs_html
@@ -122,7 +132,7 @@ class MailNotification(base.NotificationBase):
122
132
  def _validate_emails(cls, params):
123
133
  cls._validate_email_address(params["sender_address"])
124
134
 
125
- if not isinstance(params["email_addresses"], (str, list)):
135
+ if not isinstance(params["email_addresses"], str | list):
126
136
  raise ValueError(
127
137
  "Parameter 'email_addresses' must be a string or a list of strings"
128
138
  )
@@ -147,8 +157,8 @@ class MailNotification(base.NotificationBase):
147
157
  sender_address: str,
148
158
  server_host: str,
149
159
  server_port: int,
150
- username: str,
151
- password: str,
160
+ username: typing.Optional[str],
161
+ password: typing.Optional[str],
152
162
  use_tls: bool,
153
163
  start_tls: bool,
154
164
  validate_certs: bool,
@@ -163,14 +173,27 @@ class MailNotification(base.NotificationBase):
163
173
  message["Subject"] = subject
164
174
  message.attach(MIMEText(body, "html"))
165
175
 
166
- # Send the email
167
- await aiosmtplib.send(
168
- message,
169
- hostname=server_host,
170
- port=server_port,
171
- username=username,
172
- password=password,
173
- use_tls=use_tls,
174
- validate_certs=validate_certs,
175
- start_tls=start_tls,
176
- )
176
+ send_kwargs = {
177
+ "hostname": server_host,
178
+ "port": server_port,
179
+ "use_tls": use_tls,
180
+ "validate_certs": validate_certs,
181
+ "start_tls": start_tls,
182
+ }
183
+
184
+ # Only include auth parameters when provided to avoid forcing SMTP AUTH
185
+ if username is not None:
186
+ send_kwargs["username"] = username
187
+ if password is not None:
188
+ send_kwargs["password"] = password
189
+
190
+ await aiosmtplib.send(message, **send_kwargs)
191
+
192
+ @staticmethod
193
+ def _enrich_params(params):
194
+ # if username/password are not provided or empty strings, set them to None.
195
+ # this ensures consistent behavior in _send_email and avoids
196
+ # forcing SMTP auth when the server does not require authentication.
197
+ for param in ["username", "password"]:
198
+ if param not in params or not params[param]:
199
+ params[param] = None
@@ -347,7 +347,7 @@ class NotificationPusher(_NotificationPusherBase):
347
347
  run_uid=run.metadata.uid,
348
348
  )
349
349
  update_notification_status_kwargs["sent_time"] = datetime.datetime.now(
350
- tz=datetime.timezone.utc
350
+ tz=datetime.UTC
351
351
  )
352
352
  except Exception as exc:
353
353
  logger.warning(
@@ -397,7 +397,7 @@ class NotificationPusher(_NotificationPusherBase):
397
397
  run_uid=run.metadata.uid,
398
398
  )
399
399
  update_notification_status_kwargs["sent_time"] = datetime.datetime.now(
400
- tz=datetime.timezone.utc
400
+ tz=datetime.UTC
401
401
  )
402
402
 
403
403
  except Exception as exc:
@@ -1,4 +1,4 @@
1
1
  {
2
- "git_commit": "3bb8c631304e89739430fbb6e6299e190fe6738a",
3
- "version": "1.10.0-rc40"
2
+ "git_commit": "160a94f1532940f8a7d039ed5488912c27098edf",
3
+ "version": "1.11.0-rc16"
4
4
  }
@@ -14,7 +14,7 @@
14
14
 
15
15
  import json
16
16
  import sys
17
- from importlib.resources import read_text
17
+ from importlib.resources import files
18
18
 
19
19
  import mlrun.utils
20
20
  from mlrun.utils.singleton import Singleton
@@ -37,9 +37,8 @@ class Version(metaclass=Singleton):
37
37
  self.version_info = {"git_commit": "unknown", "version": "0.0.0+unstable"}
38
38
  self.python_version = self._resolve_python_version()
39
39
  try:
40
- self.version_info = json.loads(
41
- read_text("mlrun.utils.version", "version.json")
42
- )
40
+ with (files("mlrun.utils.version") / "version.json").open("r") as read_text:
41
+ self.version_info = json.loads(read_text.read())
43
42
  except Exception:
44
43
  mlrun.utils.logger.warning(
45
44
  "Failed resolving version info. Ignoring and using defaults"