mlrun 1.6.0rc35__py3-none-any.whl → 1.7.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (199)
  1. mlrun/__main__.py +3 -3
  2. mlrun/api/schemas/__init__.py +1 -1
  3. mlrun/artifacts/base.py +11 -6
  4. mlrun/artifacts/dataset.py +2 -2
  5. mlrun/artifacts/model.py +30 -24
  6. mlrun/artifacts/plots.py +2 -2
  7. mlrun/common/db/sql_session.py +5 -3
  8. mlrun/common/helpers.py +1 -2
  9. mlrun/common/schemas/artifact.py +3 -3
  10. mlrun/common/schemas/auth.py +3 -3
  11. mlrun/common/schemas/background_task.py +1 -1
  12. mlrun/common/schemas/client_spec.py +1 -1
  13. mlrun/common/schemas/feature_store.py +16 -16
  14. mlrun/common/schemas/frontend_spec.py +7 -7
  15. mlrun/common/schemas/function.py +1 -1
  16. mlrun/common/schemas/hub.py +4 -9
  17. mlrun/common/schemas/memory_reports.py +2 -2
  18. mlrun/common/schemas/model_monitoring/grafana.py +4 -4
  19. mlrun/common/schemas/model_monitoring/model_endpoints.py +14 -15
  20. mlrun/common/schemas/notification.py +4 -4
  21. mlrun/common/schemas/object.py +2 -2
  22. mlrun/common/schemas/pipeline.py +1 -1
  23. mlrun/common/schemas/project.py +3 -3
  24. mlrun/common/schemas/runtime_resource.py +8 -12
  25. mlrun/common/schemas/schedule.py +3 -3
  26. mlrun/common/schemas/tag.py +1 -2
  27. mlrun/common/schemas/workflow.py +2 -2
  28. mlrun/config.py +8 -4
  29. mlrun/data_types/to_pandas.py +1 -3
  30. mlrun/datastore/base.py +0 -28
  31. mlrun/datastore/datastore_profile.py +9 -9
  32. mlrun/datastore/filestore.py +0 -1
  33. mlrun/datastore/google_cloud_storage.py +1 -1
  34. mlrun/datastore/sources.py +7 -11
  35. mlrun/datastore/spark_utils.py +1 -2
  36. mlrun/datastore/targets.py +31 -31
  37. mlrun/datastore/utils.py +4 -6
  38. mlrun/datastore/v3io.py +70 -46
  39. mlrun/db/base.py +22 -23
  40. mlrun/db/httpdb.py +34 -34
  41. mlrun/db/nopdb.py +19 -19
  42. mlrun/errors.py +1 -1
  43. mlrun/execution.py +4 -4
  44. mlrun/feature_store/api.py +20 -21
  45. mlrun/feature_store/common.py +1 -1
  46. mlrun/feature_store/feature_set.py +28 -32
  47. mlrun/feature_store/feature_vector.py +24 -27
  48. mlrun/feature_store/retrieval/base.py +7 -7
  49. mlrun/feature_store/retrieval/conversion.py +2 -4
  50. mlrun/feature_store/steps.py +7 -15
  51. mlrun/features.py +5 -7
  52. mlrun/frameworks/_common/artifacts_library.py +9 -9
  53. mlrun/frameworks/_common/mlrun_interface.py +5 -5
  54. mlrun/frameworks/_common/model_handler.py +48 -48
  55. mlrun/frameworks/_common/plan.py +2 -3
  56. mlrun/frameworks/_common/producer.py +3 -4
  57. mlrun/frameworks/_common/utils.py +5 -5
  58. mlrun/frameworks/_dl_common/loggers/logger.py +6 -7
  59. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +9 -9
  60. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +16 -35
  61. mlrun/frameworks/_ml_common/artifacts_library.py +1 -2
  62. mlrun/frameworks/_ml_common/loggers/logger.py +3 -4
  63. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +4 -5
  64. mlrun/frameworks/_ml_common/model_handler.py +24 -24
  65. mlrun/frameworks/_ml_common/pkl_model_server.py +2 -2
  66. mlrun/frameworks/_ml_common/plan.py +1 -1
  67. mlrun/frameworks/_ml_common/plans/calibration_curve_plan.py +2 -3
  68. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +2 -3
  69. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  70. mlrun/frameworks/_ml_common/plans/feature_importance_plan.py +3 -3
  71. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  72. mlrun/frameworks/_ml_common/utils.py +4 -4
  73. mlrun/frameworks/auto_mlrun/auto_mlrun.py +7 -7
  74. mlrun/frameworks/huggingface/model_server.py +4 -4
  75. mlrun/frameworks/lgbm/__init__.py +32 -32
  76. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -5
  77. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -5
  78. mlrun/frameworks/lgbm/mlrun_interfaces/booster_mlrun_interface.py +1 -3
  79. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +6 -6
  80. mlrun/frameworks/lgbm/model_handler.py +9 -9
  81. mlrun/frameworks/lgbm/model_server.py +6 -6
  82. mlrun/frameworks/lgbm/utils.py +5 -5
  83. mlrun/frameworks/onnx/dataset.py +8 -8
  84. mlrun/frameworks/onnx/mlrun_interface.py +3 -3
  85. mlrun/frameworks/onnx/model_handler.py +6 -6
  86. mlrun/frameworks/onnx/model_server.py +7 -7
  87. mlrun/frameworks/parallel_coordinates.py +2 -2
  88. mlrun/frameworks/pytorch/__init__.py +16 -16
  89. mlrun/frameworks/pytorch/callbacks/callback.py +4 -5
  90. mlrun/frameworks/pytorch/callbacks/logging_callback.py +17 -17
  91. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +11 -11
  92. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +23 -29
  93. mlrun/frameworks/pytorch/callbacks_handler.py +38 -38
  94. mlrun/frameworks/pytorch/mlrun_interface.py +20 -20
  95. mlrun/frameworks/pytorch/model_handler.py +17 -17
  96. mlrun/frameworks/pytorch/model_server.py +7 -7
  97. mlrun/frameworks/sklearn/__init__.py +12 -12
  98. mlrun/frameworks/sklearn/estimator.py +4 -4
  99. mlrun/frameworks/sklearn/metrics_library.py +14 -14
  100. mlrun/frameworks/sklearn/mlrun_interface.py +3 -6
  101. mlrun/frameworks/sklearn/model_handler.py +2 -2
  102. mlrun/frameworks/tf_keras/__init__.py +5 -5
  103. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +14 -14
  104. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +11 -11
  105. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +19 -23
  106. mlrun/frameworks/tf_keras/mlrun_interface.py +7 -9
  107. mlrun/frameworks/tf_keras/model_handler.py +14 -14
  108. mlrun/frameworks/tf_keras/model_server.py +6 -6
  109. mlrun/frameworks/xgboost/__init__.py +12 -12
  110. mlrun/frameworks/xgboost/model_handler.py +6 -6
  111. mlrun/k8s_utils.py +4 -5
  112. mlrun/kfpops.py +2 -2
  113. mlrun/launcher/base.py +10 -10
  114. mlrun/launcher/local.py +8 -8
  115. mlrun/launcher/remote.py +7 -7
  116. mlrun/lists.py +3 -4
  117. mlrun/model.py +205 -55
  118. mlrun/model_monitoring/api.py +21 -24
  119. mlrun/model_monitoring/application.py +4 -4
  120. mlrun/model_monitoring/batch.py +17 -17
  121. mlrun/model_monitoring/controller.py +2 -1
  122. mlrun/model_monitoring/features_drift_table.py +44 -31
  123. mlrun/model_monitoring/prometheus.py +1 -4
  124. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +11 -13
  125. mlrun/model_monitoring/stores/model_endpoint_store.py +9 -11
  126. mlrun/model_monitoring/stores/models/__init__.py +2 -2
  127. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +11 -13
  128. mlrun/model_monitoring/stream_processing.py +16 -34
  129. mlrun/model_monitoring/tracking_policy.py +2 -1
  130. mlrun/package/__init__.py +6 -6
  131. mlrun/package/context_handler.py +5 -5
  132. mlrun/package/packager.py +7 -7
  133. mlrun/package/packagers/default_packager.py +6 -6
  134. mlrun/package/packagers/numpy_packagers.py +15 -15
  135. mlrun/package/packagers/pandas_packagers.py +5 -5
  136. mlrun/package/packagers/python_standard_library_packagers.py +10 -10
  137. mlrun/package/packagers_manager.py +18 -23
  138. mlrun/package/utils/_formatter.py +4 -4
  139. mlrun/package/utils/_pickler.py +2 -2
  140. mlrun/package/utils/_supported_format.py +4 -4
  141. mlrun/package/utils/log_hint_utils.py +2 -2
  142. mlrun/package/utils/type_hint_utils.py +4 -9
  143. mlrun/platforms/other.py +1 -2
  144. mlrun/projects/operations.py +5 -5
  145. mlrun/projects/pipelines.py +9 -9
  146. mlrun/projects/project.py +58 -46
  147. mlrun/render.py +1 -1
  148. mlrun/run.py +9 -9
  149. mlrun/runtimes/__init__.py +7 -4
  150. mlrun/runtimes/base.py +20 -23
  151. mlrun/runtimes/constants.py +5 -5
  152. mlrun/runtimes/daskjob.py +8 -8
  153. mlrun/runtimes/databricks_job/databricks_cancel_task.py +1 -1
  154. mlrun/runtimes/databricks_job/databricks_runtime.py +7 -7
  155. mlrun/runtimes/function_reference.py +1 -1
  156. mlrun/runtimes/local.py +1 -1
  157. mlrun/runtimes/mpijob/abstract.py +1 -2
  158. mlrun/runtimes/nuclio/__init__.py +20 -0
  159. mlrun/runtimes/{function.py → nuclio/function.py} +15 -16
  160. mlrun/runtimes/{nuclio.py → nuclio/nuclio.py} +6 -6
  161. mlrun/runtimes/{serving.py → nuclio/serving.py} +13 -12
  162. mlrun/runtimes/pod.py +95 -48
  163. mlrun/runtimes/remotesparkjob.py +1 -1
  164. mlrun/runtimes/sparkjob/spark3job.py +50 -33
  165. mlrun/runtimes/utils.py +1 -2
  166. mlrun/secrets.py +3 -3
  167. mlrun/serving/remote.py +0 -4
  168. mlrun/serving/routers.py +6 -6
  169. mlrun/serving/server.py +4 -4
  170. mlrun/serving/states.py +29 -0
  171. mlrun/serving/utils.py +3 -3
  172. mlrun/serving/v1_serving.py +6 -7
  173. mlrun/serving/v2_serving.py +50 -8
  174. mlrun/track/tracker_manager.py +3 -3
  175. mlrun/track/trackers/mlflow_tracker.py +1 -2
  176. mlrun/utils/async_http.py +5 -7
  177. mlrun/utils/azure_vault.py +1 -1
  178. mlrun/utils/clones.py +1 -2
  179. mlrun/utils/condition_evaluator.py +3 -3
  180. mlrun/utils/db.py +3 -3
  181. mlrun/utils/helpers.py +37 -119
  182. mlrun/utils/http.py +1 -4
  183. mlrun/utils/logger.py +49 -14
  184. mlrun/utils/notifications/notification/__init__.py +3 -3
  185. mlrun/utils/notifications/notification/base.py +2 -2
  186. mlrun/utils/notifications/notification/ipython.py +1 -1
  187. mlrun/utils/notifications/notification_pusher.py +8 -14
  188. mlrun/utils/retryer.py +207 -0
  189. mlrun/utils/singleton.py +1 -1
  190. mlrun/utils/v3io_clients.py +2 -3
  191. mlrun/utils/version/version.json +2 -2
  192. mlrun/utils/version/version.py +2 -6
  193. {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/METADATA +9 -9
  194. mlrun-1.7.0rc2.dist-info/RECORD +315 -0
  195. mlrun-1.6.0rc35.dist-info/RECORD +0 -313
  196. {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/LICENSE +0 -0
  197. {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/WHEEL +0 -0
  198. {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/entry_points.txt +0 -0
  199. {mlrun-1.6.0rc35.dist-info → mlrun-1.7.0rc2.dist-info}/top_level.txt +0 -0
mlrun/db/nopdb.py CHANGED
@@ -14,7 +14,7 @@
 
 
 import datetime
-from typing import List, Optional, Union
+from typing import Optional, Union
 
 import mlrun.common.schemas
 import mlrun.errors
@@ -76,9 +76,9 @@ class NopDB(RunDBInterface):
     def list_runs(
         self,
         name: Optional[str] = None,
-        uid: Optional[Union[str, List[str]]] = None,
+        uid: Optional[Union[str, list[str]]] = None,
         project: Optional[str] = None,
-        labels: Optional[Union[str, List[str]]] = None,
+        labels: Optional[Union[str, list[str]]] = None,
         state: Optional[str] = None,
         sort: bool = True,
         last: int = 0,
@@ -197,7 +197,7 @@ class NopDB(RunDBInterface):
         self,
         owner: str = None,
         format_: mlrun.common.schemas.ProjectsFormat = mlrun.common.schemas.ProjectsFormat.name_only,
-        labels: List[str] = None,
+        labels: list[str] = None,
         state: mlrun.common.schemas.ProjectState = None,
     ) -> mlrun.common.schemas.ProjectsOutput:
         pass
@@ -230,13 +230,13 @@ class NopDB(RunDBInterface):
         project: str,
         name: str = None,
         tag: str = None,
-        entities: List[str] = None,
-        labels: List[str] = None,
+        entities: list[str] = None,
+        labels: list[str] = None,
     ) -> mlrun.common.schemas.FeaturesOutput:
         pass
 
     def list_entities(
-        self, project: str, name: str = None, tag: str = None, labels: List[str] = None
+        self, project: str, name: str = None, tag: str = None, labels: list[str] = None
     ) -> mlrun.common.schemas.EntitiesOutput:
         pass
 
@@ -246,9 +246,9 @@ class NopDB(RunDBInterface):
         name: str = None,
         tag: str = None,
         state: str = None,
-        entities: List[str] = None,
-        features: List[str] = None,
-        labels: List[str] = None,
+        entities: list[str] = None,
+        features: list[str] = None,
+        labels: list[str] = None,
         partition_by: Union[
             mlrun.common.schemas.FeatureStorePartitionByField, str
         ] = None,
@@ -257,7 +257,7 @@
         partition_order: Union[
             mlrun.common.schemas.OrderType, str
         ] = mlrun.common.schemas.OrderType.desc,
-    ) -> List[dict]:
+    ) -> list[dict]:
         pass
 
     def store_feature_set(
@@ -306,7 +306,7 @@ class NopDB(RunDBInterface):
         name: str = None,
         tag: str = None,
         state: str = None,
-        labels: List[str] = None,
+        labels: list[str] = None,
         partition_by: Union[
             mlrun.common.schemas.FeatureStorePartitionByField, str
         ] = None,
@@ -315,7 +315,7 @@
         partition_order: Union[
             mlrun.common.schemas.OrderType, str
         ] = mlrun.common.schemas.OrderType.desc,
-    ) -> List[dict]:
+    ) -> list[dict]:
         pass
 
     def store_feature_vector(
@@ -388,7 +388,7 @@ class NopDB(RunDBInterface):
         provider: Union[
             str, mlrun.common.schemas.SecretProviderName
         ] = mlrun.common.schemas.SecretProviderName.kubernetes,
-        secrets: List[str] = None,
+        secrets: list[str] = None,
     ) -> mlrun.common.schemas.SecretsData:
         pass
 
@@ -408,7 +408,7 @@ class NopDB(RunDBInterface):
         provider: Union[
             str, mlrun.common.schemas.SecretProviderName
         ] = mlrun.common.schemas.SecretProviderName.kubernetes,
-        secrets: List[str] = None,
+        secrets: list[str] = None,
     ):
         pass
 
@@ -438,10 +438,10 @@ class NopDB(RunDBInterface):
         project: str,
         model: Optional[str] = None,
         function: Optional[str] = None,
-        labels: List[str] = None,
+        labels: list[str] = None,
         start: str = "now-1h",
         end: str = "now",
-        metrics: Optional[List[str]] = None,
+        metrics: Optional[list[str]] = None,
     ):
         pass
 
@@ -451,7 +451,7 @@
         endpoint_id: str,
         start: Optional[str] = None,
         end: Optional[str] = None,
-        metrics: Optional[List[str]] = None,
+        metrics: Optional[list[str]] = None,
         features: bool = False,
     ):
         pass
@@ -522,7 +522,7 @@ class NopDB(RunDBInterface):
 
     def list_datastore_profiles(
         self, project: str
-    ) -> List[mlrun.common.schemas.DatastoreProfile]:
+    ) -> list[mlrun.common.schemas.DatastoreProfile]:
         pass
 
     def store_datastore_profile(
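
The recurring edit in this file, and in most of the signature-only hunks elsewhere in this release, replaces typing.List, typing.Dict and typing.Tuple annotations with the built-in generics list, dict and tuple from PEP 585, which are usable in annotations on Python 3.9 and later. A minimal before/after sketch of the pattern, loosely modeled on the list_runs signature above (the function names here are hypothetical):

from typing import List, Optional, Union  # List is only needed for the old spelling


def list_runs_before(
    uid: Optional[Union[str, List[str]]] = None,
    labels: Optional[Union[str, List[str]]] = None,
) -> List[dict]:
    # pre-1.7 style: generic aliases imported from typing
    return []


def list_runs_after(
    uid: Optional[Union[str, list[str]]] = None,
    labels: Optional[Union[str, list[str]]] = None,
) -> list[dict]:
    # PEP 585 style: the built-in types are subscripted directly, so the
    # typing.List/Dict/Tuple imports can be dropped
    return []

Both spellings describe the same runtime behavior; the change only affects annotations, which is why every hunk in this file touches signatures and nothing else.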
mlrun/errors.py CHANGED
@@ -73,7 +73,7 @@ class MLRunHTTPStatusError(MLRunHTTPError):
     error_status_code = None
 
     def __init__(self, *args, response: requests.Response = None, **kwargs):
-        super(MLRunHTTPStatusError, self).__init__(
+        super().__init__(
             *args, response=response, status_code=self.error_status_code, **kwargs
        )
 
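The single hunk here switches to the zero-argument form of super(), which Python 3 resolves from the enclosing class and instance, so behavior is unchanged. A small self-contained sketch of the equivalence (class names hypothetical):

class BaseHTTPError(Exception):
    def __init__(self, message, status_code=None):
        super().__init__(message)
        self.status_code = status_code


class NotFoundHTTPError(BaseHTTPError):
    error_status_code = 404

    def __init__(self, message):
        # Equivalent to super(NotFoundHTTPError, self).__init__(...),
        # without repeating the class name.
        super().__init__(message, status_code=self.error_status_code)


assert NotFoundHTTPError("missing").status_code == 404
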
mlrun/execution.py CHANGED
@@ -15,7 +15,7 @@
 import os
 import uuid
 from copy import deepcopy
-from typing import List, Union
+from typing import Union
 
 import numpy as np
 import yaml
@@ -45,7 +45,7 @@ from .utils import (
 )
 
 
-class MLClientCtx(object):
+class MLClientCtx:
     """ML Execution Client Context
 
     The context is generated and injected to the function using the ``function.run()``
@@ -738,8 +738,8 @@ class MLClientCtx(object):
         artifact_path=None,
         upload=True,
         labels=None,
-        inputs: List[Feature] = None,
-        outputs: List[Feature] = None,
+        inputs: list[Feature] = None,
+        outputs: list[Feature] = None,
         feature_vector: str = None,
         feature_weights: list = None,
         training_set=None,
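
Besides the typing import cleanup, this file drops the explicit (object) base from the MLClientCtx declaration; in Python 3 every class is a new-style class, so the two spellings are equivalent. A tiny illustration (class names hypothetical):

class LegacyStyle(object):  # Python 2-era spelling, redundant in Python 3
    pass


class ModernStyle:  # same semantics, shorter
    pass


# Both implicitly inherit from object.
assert LegacyStyle.__mro__[-1] is object
assert ModernStyle.__mro__[-1] is object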
mlrun/feature_store/api.py CHANGED
@@ -15,9 +15,8 @@ import copy
 import importlib.util
 import pathlib
 import sys
-import typing
 from datetime import datetime
-from typing import Any, Dict, List, Optional, Union
+from typing import Any, Optional, Union
 import pandas as pd
 from deprecated import deprecated
@@ -103,7 +102,7 @@ def get_offline_features(
     entity_timestamp_column: str = None,
     target: DataTargetBase = None,
     run_config: RunConfig = None,
-    drop_columns: List[str] = None,
+    drop_columns: list[str] = None,
     start_time: Union[str, datetime] = None,
     end_time: Union[str, datetime] = None,
     with_indexes: bool = False,
@@ -111,9 +110,9 @@
     engine: str = None,
     engine_args: dict = None,
     query: str = None,
-    order_by: Union[str, List[str]] = None,
+    order_by: Union[str, list[str]] = None,
     spark_service: str = None,
-    timestamp_for_filtering: Union[str, Dict[str, str]] = None,
+    timestamp_for_filtering: Union[str, dict[str, str]] = None,
 ):
     """retrieve offline feature vector results
 
@@ -201,7 +200,7 @@ def _get_offline_features(
     entity_timestamp_column: str = None,
     target: DataTargetBase = None,
     run_config: RunConfig = None,
-    drop_columns: List[str] = None,
+    drop_columns: list[str] = None,
     start_time: Union[str, datetime] = None,
     end_time: Union[str, datetime] = None,
     with_indexes: bool = False,
@@ -209,9 +208,9 @@
     engine: str = None,
     engine_args: dict = None,
     query: str = None,
-    order_by: Union[str, List[str]] = None,
+    order_by: Union[str, list[str]] = None,
     spark_service: str = None,
-    timestamp_for_filtering: Union[str, Dict[str, str]] = None,
+    timestamp_for_filtering: Union[str, dict[str, str]] = None,
 ) -> Union[OfflineVectorResponse, RemoteVectorResponse]:
     if entity_rows is None and entity_timestamp_column is not None:
         raise mlrun.errors.MLRunInvalidArgumentError(
@@ -280,7 +279,7 @@ def get_online_feature_service(
     fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
     impute_policy: dict = None,
     update_stats: bool = False,
-    entity_keys: List[str] = None,
+    entity_keys: list[str] = None,
 ):
     """initialize and return online feature vector service api,
     returns :py:class:`~mlrun.feature_store.OnlineVectorService`
@@ -361,7 +360,7 @@ def _get_online_feature_service(
     fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
     impute_policy: dict = None,
     update_stats: bool = False,
-    entity_keys: List[str] = None,
+    entity_keys: list[str] = None,
 ) -> OnlineVectorService:
     if isinstance(feature_vector, FeatureVector):
         update_stats = True
@@ -413,7 +412,7 @@ def _rename_source_dataframe_columns(df: pd.DataFrame) -> pd.DataFrame:
     return df
 
 
-def _get_namespace(run_config: RunConfig) -> Dict[str, Any]:
+def _get_namespace(run_config: RunConfig) -> dict[str, Any]:
     # if running locally, we need to import the file dynamically to get its namespace
     if run_config and run_config.local and run_config.function:
         filename = run_config.function.spec.filename
@@ -431,7 +430,7 @@ def _get_namespace(run_config: RunConfig) -> Dict[str, Any]:
 def ingest(
     featureset: Union[FeatureSet, str] = None,
     source=None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     namespace=None,
     return_df: bool = True,
     infer_options: InferOptions = InferOptions.default(),
@@ -511,7 +510,7 @@
 def _ingest(
     featureset: Union[FeatureSet, str] = None,
     source=None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     namespace=None,
     return_df: bool = True,
     infer_options: InferOptions = InferOptions.default(),
@@ -876,7 +875,7 @@ def _preview(
 def _run_ingestion_job(
     featureset: Union[FeatureSet, str],
     source: DataSource = None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     name: str = None,
     infer_options: InferOptions = InferOptions.default(),
     run_config: RunConfig = None,
@@ -901,11 +900,11 @@
 def deploy_ingestion_service_v2(
     featureset: Union[FeatureSet, str],
     source: DataSource = None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     name: str = None,
     run_config: RunConfig = None,
     verbose=False,
-) -> typing.Tuple[str, BaseRuntime]:
+) -> tuple[str, BaseRuntime]:
     """Start real-time ingestion service using nuclio function
 
     Deploy a real-time function implementing feature ingestion pipeline
@@ -944,11 +943,11 @@ def deploy_ingestion_service_v2(
 def _deploy_ingestion_service_v2(
     featureset: Union[FeatureSet, str],
     source: DataSource = None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     name: str = None,
     run_config: RunConfig = None,
     verbose=False,
-) -> typing.Tuple[str, BaseRuntime]:
+) -> tuple[str, BaseRuntime]:
     if isinstance(featureset, str):
         featureset = get_feature_set_by_uri(featureset)
 
@@ -1011,7 +1010,7 @@ def _deploy_ingestion_service_v2(
 def deploy_ingestion_service(
     featureset: Union[FeatureSet, str],
     source: DataSource = None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     name: str = None,
     run_config: RunConfig = None,
     verbose=False,
@@ -1054,7 +1053,7 @@ def _ingest_with_spark(
     spark=None,
     featureset: Union[FeatureSet, str] = None,
     source: BaseSourceDriver = None,
-    targets: List[BaseStoreTarget] = None,
+    targets: list[BaseStoreTarget] = None,
     infer_options: InferOptions = InferOptions.default(),
     mlrun_context=None,
     namespace=None,
@@ -1207,7 +1206,7 @@ def _infer_from_static_df(
 def set_task_params(
     featureset: FeatureSet,
     source: DataSource = None,
-    targets: List[DataTargetBase] = None,
+    targets: list[DataTargetBase] = None,
     parameters: dict = None,
     infer_options: InferOptions = InferOptions.Null,
     overwrite=None,
mlrun/feature_store/common.py CHANGED
@@ -192,7 +192,7 @@ class RunConfig:
         owner=None,
         credentials: typing.Optional[mlrun.model.Credentials] = None,
         code: str = None,
-        requirements: typing.Union[str, typing.List[str]] = None,
+        requirements: typing.Union[str, list[str]] = None,
         extra_spec: dict = None,
         auth_info=None,
     ):
@@ -13,7 +13,7 @@
13
13
  # limitations under the License.
14
14
  import warnings
15
15
  from datetime import datetime
16
- from typing import Dict, List, Optional, Tuple, Union
16
+ from typing import Optional, Union
17
17
 
18
18
  import pandas as pd
19
19
  from storey import EmitEveryEvent, EmitPolicy
@@ -119,9 +119,9 @@ class FeatureSetSpec(ModelObj):
119
119
 
120
120
  self.owner = owner
121
121
  self.description = description
122
- self.entities: List[Union[Entity, str]] = entities or []
123
- self.relations: Dict[str, Union[Entity, str]] = relations or {}
124
- self.features: List[Feature] = features or []
122
+ self.entities: list[Union[Entity, str]] = entities or []
123
+ self.relations: dict[str, Union[Entity, str]] = relations or {}
124
+ self.features: list[Feature] = features or []
125
125
  self.partition_keys = partition_keys or []
126
126
  self.timestamp_key = timestamp_key
127
127
  self.source = source
@@ -136,12 +136,12 @@ class FeatureSetSpec(ModelObj):
136
136
  self.with_default_targets = True
137
137
 
138
138
  @property
139
- def entities(self) -> List[Entity]:
139
+ def entities(self) -> list[Entity]:
140
140
  """feature set entities (indexes)"""
141
141
  return self._entities
142
142
 
143
143
  @entities.setter
144
- def entities(self, entities: List[Union[Entity, str]]):
144
+ def entities(self, entities: list[Union[Entity, str]]):
145
145
  if entities:
146
146
  # if the entity is a string, convert it to Entity class
147
147
  for i, entity in enumerate(entities):
@@ -163,21 +163,21 @@ class FeatureSetSpec(ModelObj):
163
163
  self._entities = ObjectList.from_list(Entity, entities)
164
164
 
165
165
  @property
166
- def features(self) -> List[Feature]:
166
+ def features(self) -> list[Feature]:
167
167
  """feature set features list"""
168
168
  return self._features
169
169
 
170
170
  @features.setter
171
- def features(self, features: List[Feature]):
171
+ def features(self, features: list[Feature]):
172
172
  self._features = ObjectList.from_list(Feature, features)
173
173
 
174
174
  @property
175
- def targets(self) -> List[DataTargetBase]:
175
+ def targets(self) -> list[DataTargetBase]:
176
176
  """list of desired targets (material storage)"""
177
177
  return self._targets
178
178
 
179
179
  @targets.setter
180
- def targets(self, targets: List[DataTargetBase]):
180
+ def targets(self, targets: list[DataTargetBase]):
181
181
  self._targets = ObjectList.from_list(DataTargetBase, targets)
182
182
 
183
183
  @property
@@ -230,12 +230,12 @@ class FeatureSetSpec(ModelObj):
230
230
  self._source = source
231
231
 
232
232
  @property
233
- def relations(self) -> Dict[str, Entity]:
233
+ def relations(self) -> dict[str, Entity]:
234
234
  """feature set relations dict"""
235
235
  return self._relations
236
236
 
237
237
  @relations.setter
238
- def relations(self, relations: Dict[str, Entity]):
238
+ def relations(self, relations: dict[str, Entity]):
239
239
  for col, ent in relations.items():
240
240
  if isinstance(ent, str):
241
241
  relations[col] = Entity(ent)
@@ -284,12 +284,12 @@ class FeatureSetStatus(ModelObj):
284
284
  self.run_uri = run_uri
285
285
 
286
286
  @property
287
- def targets(self) -> List[DataTarget]:
287
+ def targets(self) -> list[DataTarget]:
288
288
  """list of material storage targets + their status/path"""
289
289
  return self._targets
290
290
 
291
291
  @targets.setter
292
- def targets(self, targets: List[DataTarget]):
292
+ def targets(self, targets: list[DataTarget]):
293
293
  self._targets = ObjectList.from_list(DataTarget, targets)
294
294
 
295
295
  def update_target(self, target: DataTarget):
@@ -318,8 +318,6 @@ def emit_policy_to_dict(policy: EmitPolicy):
318
318
 
319
319
 
320
320
  class FeatureSet(ModelObj):
321
- """Feature set object, defines a set of features and their data pipeline"""
322
-
323
321
  kind = mlrun.common.schemas.ObjectKind.feature_set.value
324
322
  _dict_fields = ["kind", "metadata", "spec", "status"]
325
323
 
@@ -327,11 +325,11 @@ class FeatureSet(ModelObj):
327
325
  self,
328
326
  name: str = None,
329
327
  description: str = None,
330
- entities: List[Union[Entity, str]] = None,
328
+ entities: list[Union[Entity, str]] = None,
331
329
  timestamp_key: str = None,
332
330
  engine: str = None,
333
331
  label_column: str = None,
334
- relations: Dict[str, Union[Entity, str]] = None,
332
+ relations: dict[str, Union[Entity, str]] = None,
335
333
  passthrough: bool = None,
336
334
  ):
337
335
  """Feature set object, defines a set of features and their data pipeline
@@ -532,7 +530,7 @@ class FeatureSet(ModelObj):
532
530
  self, **(class_args if class_args is not None else {})
533
531
  )
534
532
 
535
- def purge_targets(self, target_names: List[str] = None, silent: bool = False):
533
+ def purge_targets(self, target_names: list[str] = None, silent: bool = False):
536
534
  """Delete data of specific targets
537
535
  :param target_names: List of names of targets to delete (default: delete all ingested targets)
538
536
  :param silent: Fail silently if target doesn't exist in featureset status"""
@@ -560,7 +558,7 @@ class FeatureSet(ModelObj):
560
558
 
561
559
  def update_targets_for_ingest(
562
560
  self,
563
- targets: List[DataTargetBase],
561
+ targets: list[DataTargetBase],
564
562
  overwrite: bool = None,
565
563
  ):
566
564
  if not targets:
@@ -581,7 +579,7 @@ class FeatureSet(ModelObj):
581
579
  update_targets_run_id_for_ingest(overwrite, targets, status_targets)
582
580
 
583
581
  def _reload_and_get_status_targets(
584
- self, target_names: List[str] = None, silent: bool = False
582
+ self, target_names: list[str] = None, silent: bool = False
585
583
  ):
586
584
  try:
587
585
  self.reload(update_spec=False)
@@ -602,9 +600,7 @@ class FeatureSet(ModelObj):
602
600
  pass
603
601
  else:
604
602
  raise mlrun.errors.MLRunNotFoundError(
605
- "Target not found in status (fset={0}, target={1})".format(
606
- self.metadata.name, target_name
607
- )
603
+ f"Target not found in status (fset={self.metadata.name}, target={target_name})"
608
604
  )
609
605
  else:
610
606
  targets = self.status.targets
@@ -621,7 +617,7 @@ class FeatureSet(ModelObj):
621
617
  name: str,
622
618
  value_type: mlrun.data_types.ValueType = None,
623
619
  description: str = None,
624
- labels: Optional[Dict[str, str]] = None,
620
+ labels: Optional[dict[str, str]] = None,
625
621
  ):
626
622
  """add/set an entity (dataset index)
627
623
 
@@ -983,7 +979,7 @@ class FeatureSet(ModelObj):
983
979
  def ingest(
984
980
  self,
985
981
  source=None,
986
- targets: List[DataTargetBase] = None,
982
+ targets: list[DataTargetBase] = None,
987
983
  namespace=None,
988
984
  return_df: bool = True,
989
985
  infer_options: InferOptions = InferOptions.default(),
@@ -1085,11 +1081,11 @@ class FeatureSet(ModelObj):
1085
1081
  def deploy_ingestion_service(
1086
1082
  self,
1087
1083
  source: DataSource = None,
1088
- targets: List[DataTargetBase] = None,
1084
+ targets: list[DataTargetBase] = None,
1089
1085
  name: str = None,
1090
1086
  run_config: RunConfig = None,
1091
1087
  verbose=False,
1092
- ) -> Tuple[str, BaseRuntime]:
1088
+ ) -> tuple[str, BaseRuntime]:
1093
1089
  """Start real-time ingestion service using nuclio function
1094
1090
 
1095
1091
  Deploy a real-time function implementing feature ingestion pipeline
@@ -1122,7 +1118,7 @@ class FeatureSet(ModelObj):
1122
1118
  def extract_relation_keys(
1123
1119
  self,
1124
1120
  other_feature_set,
1125
- relations: Dict[str, Union[str, Entity]] = None,
1121
+ relations: dict[str, Union[str, Entity]] = None,
1126
1122
  ) -> list[str]:
1127
1123
  """
1128
1124
  Checks whether a feature set can be merged to the right of this feature set.
@@ -1189,10 +1185,10 @@ class SparkAggregateByKey(StepToDict):
1189
1185
 
1190
1186
  def __init__(
1191
1187
  self,
1192
- key_columns: List[str],
1188
+ key_columns: list[str],
1193
1189
  time_column: str,
1194
- aggregates: List[Dict],
1195
- emit_policy: Union[EmitPolicy, Dict] = None,
1190
+ aggregates: list[dict],
1191
+ emit_policy: Union[EmitPolicy, dict] = None,
1196
1192
  ):
1197
1193
  self.key_columns = key_columns
1198
1194
  self.time_column = time_column