mlrun 1.7.2__py3-none-any.whl → 1.8.0rc1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; see the package registry's advisory page for more details.

Files changed (222)
  1. mlrun/__init__.py +14 -12
  2. mlrun/__main__.py +3 -3
  3. mlrun/alerts/alert.py +19 -12
  4. mlrun/artifacts/__init__.py +0 -2
  5. mlrun/artifacts/base.py +34 -11
  6. mlrun/artifacts/dataset.py +16 -16
  7. mlrun/artifacts/manager.py +13 -13
  8. mlrun/artifacts/model.py +66 -53
  9. mlrun/common/constants.py +6 -0
  10. mlrun/common/formatters/__init__.py +1 -0
  11. mlrun/common/formatters/feature_set.py +1 -0
  12. mlrun/common/formatters/function.py +1 -0
  13. mlrun/common/formatters/model_endpoint.py +30 -0
  14. mlrun/common/formatters/pipeline.py +1 -2
  15. mlrun/common/model_monitoring/__init__.py +0 -3
  16. mlrun/common/model_monitoring/helpers.py +1 -1
  17. mlrun/common/runtimes/constants.py +1 -2
  18. mlrun/common/schemas/__init__.py +4 -2
  19. mlrun/common/schemas/artifact.py +0 -6
  20. mlrun/common/schemas/common.py +50 -0
  21. mlrun/common/schemas/model_monitoring/__init__.py +8 -1
  22. mlrun/common/schemas/model_monitoring/constants.py +62 -12
  23. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +149 -0
  24. mlrun/common/schemas/model_monitoring/model_endpoints.py +21 -5
  25. mlrun/common/schemas/partition.py +122 -0
  26. mlrun/config.py +43 -15
  27. mlrun/data_types/__init__.py +0 -2
  28. mlrun/data_types/data_types.py +0 -1
  29. mlrun/data_types/infer.py +3 -1
  30. mlrun/data_types/spark.py +4 -4
  31. mlrun/data_types/to_pandas.py +2 -11
  32. mlrun/datastore/__init__.py +0 -2
  33. mlrun/datastore/alibaba_oss.py +4 -1
  34. mlrun/datastore/azure_blob.py +4 -1
  35. mlrun/datastore/base.py +12 -4
  36. mlrun/datastore/datastore.py +9 -3
  37. mlrun/datastore/datastore_profile.py +1 -1
  38. mlrun/datastore/dbfs_store.py +4 -1
  39. mlrun/datastore/filestore.py +4 -1
  40. mlrun/datastore/google_cloud_storage.py +4 -1
  41. mlrun/datastore/hdfs.py +4 -1
  42. mlrun/datastore/inmem.py +4 -1
  43. mlrun/datastore/redis.py +4 -1
  44. mlrun/datastore/s3.py +4 -1
  45. mlrun/datastore/sources.py +51 -49
  46. mlrun/datastore/store_resources.py +0 -2
  47. mlrun/datastore/targets.py +22 -23
  48. mlrun/datastore/utils.py +2 -2
  49. mlrun/datastore/v3io.py +4 -1
  50. mlrun/datastore/wasbfs/fs.py +13 -12
  51. mlrun/db/base.py +126 -62
  52. mlrun/db/factory.py +3 -0
  53. mlrun/db/httpdb.py +767 -231
  54. mlrun/db/nopdb.py +126 -57
  55. mlrun/errors.py +2 -2
  56. mlrun/execution.py +55 -29
  57. mlrun/feature_store/__init__.py +0 -2
  58. mlrun/feature_store/api.py +40 -40
  59. mlrun/feature_store/common.py +9 -9
  60. mlrun/feature_store/feature_set.py +20 -18
  61. mlrun/feature_store/feature_vector.py +27 -24
  62. mlrun/feature_store/retrieval/base.py +14 -9
  63. mlrun/feature_store/retrieval/job.py +2 -1
  64. mlrun/feature_store/steps.py +2 -2
  65. mlrun/features.py +30 -13
  66. mlrun/frameworks/__init__.py +1 -2
  67. mlrun/frameworks/_common/__init__.py +1 -2
  68. mlrun/frameworks/_common/artifacts_library.py +2 -2
  69. mlrun/frameworks/_common/mlrun_interface.py +10 -6
  70. mlrun/frameworks/_common/model_handler.py +29 -27
  71. mlrun/frameworks/_common/producer.py +3 -1
  72. mlrun/frameworks/_dl_common/__init__.py +1 -2
  73. mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
  74. mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
  75. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
  76. mlrun/frameworks/_ml_common/__init__.py +1 -2
  77. mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
  78. mlrun/frameworks/_ml_common/model_handler.py +21 -21
  79. mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
  80. mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
  81. mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
  82. mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
  83. mlrun/frameworks/auto_mlrun/__init__.py +1 -2
  84. mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
  85. mlrun/frameworks/huggingface/__init__.py +1 -2
  86. mlrun/frameworks/huggingface/model_server.py +9 -9
  87. mlrun/frameworks/lgbm/__init__.py +47 -44
  88. mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
  89. mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
  90. mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
  91. mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
  92. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
  93. mlrun/frameworks/lgbm/model_handler.py +15 -11
  94. mlrun/frameworks/lgbm/model_server.py +11 -7
  95. mlrun/frameworks/lgbm/utils.py +2 -2
  96. mlrun/frameworks/onnx/__init__.py +1 -2
  97. mlrun/frameworks/onnx/dataset.py +3 -3
  98. mlrun/frameworks/onnx/mlrun_interface.py +2 -2
  99. mlrun/frameworks/onnx/model_handler.py +7 -5
  100. mlrun/frameworks/onnx/model_server.py +8 -6
  101. mlrun/frameworks/parallel_coordinates.py +11 -11
  102. mlrun/frameworks/pytorch/__init__.py +22 -23
  103. mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
  104. mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
  105. mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
  106. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
  107. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
  108. mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
  109. mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
  110. mlrun/frameworks/pytorch/model_handler.py +21 -17
  111. mlrun/frameworks/pytorch/model_server.py +13 -9
  112. mlrun/frameworks/sklearn/__init__.py +19 -18
  113. mlrun/frameworks/sklearn/estimator.py +2 -2
  114. mlrun/frameworks/sklearn/metric.py +3 -3
  115. mlrun/frameworks/sklearn/metrics_library.py +8 -6
  116. mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
  117. mlrun/frameworks/sklearn/model_handler.py +4 -3
  118. mlrun/frameworks/tf_keras/__init__.py +11 -12
  119. mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
  120. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
  121. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
  122. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
  123. mlrun/frameworks/tf_keras/model_handler.py +17 -13
  124. mlrun/frameworks/tf_keras/model_server.py +12 -8
  125. mlrun/frameworks/xgboost/__init__.py +19 -18
  126. mlrun/frameworks/xgboost/model_handler.py +13 -9
  127. mlrun/launcher/base.py +3 -4
  128. mlrun/launcher/local.py +1 -1
  129. mlrun/launcher/remote.py +1 -1
  130. mlrun/lists.py +4 -3
  131. mlrun/model.py +108 -44
  132. mlrun/model_monitoring/__init__.py +1 -2
  133. mlrun/model_monitoring/api.py +6 -6
  134. mlrun/model_monitoring/applications/_application_steps.py +13 -15
  135. mlrun/model_monitoring/applications/histogram_data_drift.py +41 -15
  136. mlrun/model_monitoring/applications/results.py +55 -3
  137. mlrun/model_monitoring/controller.py +185 -223
  138. mlrun/model_monitoring/db/_schedules.py +156 -0
  139. mlrun/model_monitoring/db/_stats.py +189 -0
  140. mlrun/model_monitoring/db/stores/__init__.py +1 -1
  141. mlrun/model_monitoring/db/stores/base/store.py +6 -65
  142. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -25
  143. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -97
  144. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +2 -58
  145. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -15
  146. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +6 -257
  147. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +9 -271
  148. mlrun/model_monitoring/db/tsdb/base.py +74 -22
  149. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +66 -35
  150. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
  151. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +284 -51
  152. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
  153. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +35 -17
  154. mlrun/model_monitoring/helpers.py +97 -1
  155. mlrun/model_monitoring/model_endpoint.py +4 -2
  156. mlrun/model_monitoring/stream_processing.py +2 -2
  157. mlrun/model_monitoring/tracking_policy.py +10 -3
  158. mlrun/model_monitoring/writer.py +47 -26
  159. mlrun/package/__init__.py +3 -6
  160. mlrun/package/context_handler.py +1 -1
  161. mlrun/package/packager.py +12 -9
  162. mlrun/package/packagers/__init__.py +0 -2
  163. mlrun/package/packagers/default_packager.py +14 -11
  164. mlrun/package/packagers/numpy_packagers.py +16 -7
  165. mlrun/package/packagers/pandas_packagers.py +18 -18
  166. mlrun/package/packagers/python_standard_library_packagers.py +25 -11
  167. mlrun/package/packagers_manager.py +31 -14
  168. mlrun/package/utils/__init__.py +0 -3
  169. mlrun/package/utils/_pickler.py +6 -6
  170. mlrun/platforms/__init__.py +3 -3
  171. mlrun/platforms/iguazio.py +4 -1
  172. mlrun/projects/__init__.py +1 -6
  173. mlrun/projects/operations.py +27 -27
  174. mlrun/projects/pipelines.py +85 -215
  175. mlrun/projects/project.py +444 -158
  176. mlrun/run.py +9 -9
  177. mlrun/runtimes/__init__.py +1 -3
  178. mlrun/runtimes/base.py +13 -10
  179. mlrun/runtimes/daskjob.py +9 -9
  180. mlrun/runtimes/generators.py +2 -1
  181. mlrun/runtimes/kubejob.py +4 -5
  182. mlrun/runtimes/mpijob/__init__.py +0 -2
  183. mlrun/runtimes/mpijob/abstract.py +7 -6
  184. mlrun/runtimes/nuclio/api_gateway.py +7 -7
  185. mlrun/runtimes/nuclio/application/application.py +11 -11
  186. mlrun/runtimes/nuclio/function.py +14 -14
  187. mlrun/runtimes/nuclio/serving.py +9 -9
  188. mlrun/runtimes/pod.py +74 -29
  189. mlrun/runtimes/remotesparkjob.py +3 -2
  190. mlrun/runtimes/sparkjob/__init__.py +0 -2
  191. mlrun/runtimes/sparkjob/spark3job.py +21 -11
  192. mlrun/runtimes/utils.py +6 -5
  193. mlrun/serving/merger.py +6 -4
  194. mlrun/serving/remote.py +18 -17
  195. mlrun/serving/routers.py +27 -27
  196. mlrun/serving/server.py +1 -1
  197. mlrun/serving/states.py +76 -71
  198. mlrun/serving/utils.py +13 -2
  199. mlrun/serving/v1_serving.py +3 -2
  200. mlrun/serving/v2_serving.py +4 -4
  201. mlrun/track/__init__.py +1 -1
  202. mlrun/track/tracker.py +2 -2
  203. mlrun/track/trackers/mlflow_tracker.py +6 -5
  204. mlrun/utils/async_http.py +1 -1
  205. mlrun/utils/helpers.py +72 -28
  206. mlrun/utils/logger.py +104 -2
  207. mlrun/utils/notifications/notification/base.py +23 -4
  208. mlrun/utils/notifications/notification/console.py +1 -1
  209. mlrun/utils/notifications/notification/git.py +6 -6
  210. mlrun/utils/notifications/notification/ipython.py +5 -4
  211. mlrun/utils/notifications/notification/slack.py +1 -1
  212. mlrun/utils/notifications/notification/webhook.py +13 -17
  213. mlrun/utils/notifications/notification_pusher.py +23 -19
  214. mlrun/utils/regex.py +1 -1
  215. mlrun/utils/version/version.json +2 -2
  216. {mlrun-1.7.2.dist-info → mlrun-1.8.0rc1.dist-info}/METADATA +187 -199
  217. mlrun-1.8.0rc1.dist-info/RECORD +356 -0
  218. {mlrun-1.7.2.dist-info → mlrun-1.8.0rc1.dist-info}/WHEEL +1 -1
  219. mlrun-1.7.2.dist-info/RECORD +0 -351
  220. {mlrun-1.7.2.dist-info → mlrun-1.8.0rc1.dist-info}/LICENSE +0 -0
  221. {mlrun-1.7.2.dist-info → mlrun-1.8.0rc1.dist-info}/entry_points.txt +0 -0
  222. {mlrun-1.7.2.dist-info → mlrun-1.8.0rc1.dist-info}/top_level.txt +0 -0
@@ -99,21 +99,21 @@ def _features_to_vector_and_check_permissions(features, update_stats):
99
99
  def get_offline_features(
100
100
  feature_vector: Union[str, FeatureVector],
101
101
  entity_rows=None,
102
- entity_timestamp_column: str = None,
102
+ entity_timestamp_column: Optional[str] = None,
103
103
  target: DataTargetBase = None,
104
104
  run_config: RunConfig = None,
105
- drop_columns: list[str] = None,
106
- start_time: Union[str, datetime] = None,
107
- end_time: Union[str, datetime] = None,
105
+ drop_columns: Optional[list[str]] = None,
106
+ start_time: Optional[Union[str, datetime]] = None,
107
+ end_time: Optional[Union[str, datetime]] = None,
108
108
  with_indexes: bool = False,
109
109
  update_stats: bool = False,
110
- engine: str = None,
111
- engine_args: dict = None,
112
- query: str = None,
113
- order_by: Union[str, list[str]] = None,
114
- spark_service: str = None,
115
- timestamp_for_filtering: Union[str, dict[str, str]] = None,
116
- additional_filters: list = None,
110
+ engine: Optional[str] = None,
111
+ engine_args: Optional[dict] = None,
112
+ query: Optional[str] = None,
113
+ order_by: Optional[Union[str, list[str]]] = None,
114
+ spark_service: Optional[str] = None,
115
+ timestamp_for_filtering: Optional[Union[str, dict[str, str]]] = None,
116
+ additional_filters: Optional[list] = None,
117
117
  ):
118
118
  """retrieve offline feature vector results
119
119
 
@@ -209,20 +209,20 @@ def get_offline_features(
209
209
  def _get_offline_features(
210
210
  feature_vector: Union[str, FeatureVector],
211
211
  entity_rows=None,
212
- entity_timestamp_column: str = None,
212
+ entity_timestamp_column: Optional[str] = None,
213
213
  target: DataTargetBase = None,
214
214
  run_config: RunConfig = None,
215
- drop_columns: list[str] = None,
216
- start_time: Union[str, datetime] = None,
217
- end_time: Union[str, datetime] = None,
215
+ drop_columns: Optional[list[str]] = None,
216
+ start_time: Optional[Union[str, datetime]] = None,
217
+ end_time: Optional[Union[str, datetime]] = None,
218
218
  with_indexes: bool = False,
219
219
  update_stats: bool = False,
220
- engine: str = None,
221
- engine_args: dict = None,
222
- query: str = None,
223
- order_by: Union[str, list[str]] = None,
224
- spark_service: str = None,
225
- timestamp_for_filtering: Union[str, dict[str, str]] = None,
220
+ engine: Optional[str] = None,
221
+ engine_args: Optional[dict] = None,
222
+ query: Optional[str] = None,
223
+ order_by: Optional[Union[str, list[str]]] = None,
224
+ spark_service: Optional[str] = None,
225
+ timestamp_for_filtering: Optional[Union[str, dict[str, str]]] = None,
226
226
  additional_filters=None,
227
227
  ) -> Union[OfflineVectorResponse, RemoteVectorResponse]:
228
228
  if entity_rows is None and entity_timestamp_column is not None:
@@ -297,9 +297,9 @@ def get_online_feature_service(
297
297
  feature_vector: Union[str, FeatureVector],
298
298
  run_config: RunConfig = None,
299
299
  fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
300
- impute_policy: dict = None,
300
+ impute_policy: Optional[dict] = None,
301
301
  update_stats: bool = False,
302
- entity_keys: list[str] = None,
302
+ entity_keys: Optional[list[str]] = None,
303
303
  ):
304
304
  """initialize and return online feature vector service api,
305
305
  returns :py:class:`~mlrun.feature_store.OnlineVectorService`
@@ -378,9 +378,9 @@ def _get_online_feature_service(
378
378
  feature_vector: Union[str, FeatureVector],
379
379
  run_config: RunConfig = None,
380
380
  fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
381
- impute_policy: dict = None,
381
+ impute_policy: Optional[dict] = None,
382
382
  update_stats: bool = False,
383
- entity_keys: list[str] = None,
383
+ entity_keys: Optional[list[str]] = None,
384
384
  ) -> OnlineVectorService:
385
385
  if isinstance(feature_vector, FeatureVector):
386
386
  update_stats = True
@@ -450,7 +450,7 @@ def _get_namespace(run_config: RunConfig) -> dict[str, Any]:
450
450
  def ingest(
451
451
  featureset: Union[FeatureSet, str] = None,
452
452
  source=None,
453
- targets: list[DataTargetBase] = None,
453
+ targets: Optional[list[DataTargetBase]] = None,
454
454
  namespace=None,
455
455
  return_df: bool = True,
456
456
  infer_options: InferOptions = InferOptions.default(),
@@ -530,7 +530,7 @@ def ingest(
530
530
  def _ingest(
531
531
  featureset: Union[FeatureSet, str] = None,
532
532
  source=None,
533
- targets: list[DataTargetBase] = None,
533
+ targets: Optional[list[DataTargetBase]] = None,
534
534
  namespace=None,
535
535
  return_df: bool = True,
536
536
  infer_options: InferOptions = InferOptions.default(),
@@ -783,11 +783,11 @@ def _ingest(
783
783
  def preview(
784
784
  featureset: FeatureSet,
785
785
  source,
786
- entity_columns: list = None,
786
+ entity_columns: Optional[list] = None,
787
787
  namespace=None,
788
788
  options: InferOptions = None,
789
789
  verbose: bool = False,
790
- sample_size: int = None,
790
+ sample_size: Optional[int] = None,
791
791
  ) -> pd.DataFrame:
792
792
  """run the ingestion pipeline with local DataFrame/file data and infer features schema and stats
793
793
 
@@ -825,11 +825,11 @@ def preview(
825
825
  def _preview(
826
826
  featureset: FeatureSet,
827
827
  source,
828
- entity_columns: list = None,
828
+ entity_columns: Optional[list] = None,
829
829
  namespace=None,
830
830
  options: InferOptions = None,
831
831
  verbose: bool = False,
832
- sample_size: int = None,
832
+ sample_size: Optional[int] = None,
833
833
  ) -> pd.DataFrame:
834
834
  if isinstance(source, pd.DataFrame):
835
835
  source = _rename_source_dataframe_columns(source)
@@ -895,8 +895,8 @@ def _preview(
895
895
  def _run_ingestion_job(
896
896
  featureset: Union[FeatureSet, str],
897
897
  source: DataSource = None,
898
- targets: list[DataTargetBase] = None,
899
- name: str = None,
898
+ targets: Optional[list[DataTargetBase]] = None,
899
+ name: Optional[str] = None,
900
900
  infer_options: InferOptions = InferOptions.default(),
901
901
  run_config: RunConfig = None,
902
902
  ):
@@ -920,8 +920,8 @@ def _run_ingestion_job(
920
920
  def deploy_ingestion_service_v2(
921
921
  featureset: Union[FeatureSet, str],
922
922
  source: DataSource = None,
923
- targets: list[DataTargetBase] = None,
924
- name: str = None,
923
+ targets: Optional[list[DataTargetBase]] = None,
924
+ name: Optional[str] = None,
925
925
  run_config: RunConfig = None,
926
926
  verbose=False,
927
927
  ) -> tuple[str, BaseRuntime]:
@@ -963,8 +963,8 @@ def deploy_ingestion_service_v2(
963
963
  def _deploy_ingestion_service_v2(
964
964
  featureset: Union[FeatureSet, str],
965
965
  source: DataSource = None,
966
- targets: list[DataTargetBase] = None,
967
- name: str = None,
966
+ targets: Optional[list[DataTargetBase]] = None,
967
+ name: Optional[str] = None,
968
968
  run_config: RunConfig = None,
969
969
  verbose=False,
970
970
  ) -> tuple[str, BaseRuntime]:
@@ -1026,7 +1026,7 @@ def _ingest_with_spark(
1026
1026
  spark=None,
1027
1027
  featureset: Union[FeatureSet, str] = None,
1028
1028
  source: BaseSourceDriver = None,
1029
- targets: list[BaseStoreTarget] = None,
1029
+ targets: Optional[list[BaseStoreTarget]] = None,
1030
1030
  infer_options: InferOptions = InferOptions.default(),
1031
1031
  mlrun_context=None,
1032
1032
  namespace=None,
@@ -1199,8 +1199,8 @@ def _infer_from_static_df(
1199
1199
  def set_task_params(
1200
1200
  featureset: FeatureSet,
1201
1201
  source: DataSource = None,
1202
- targets: list[DataTargetBase] = None,
1203
- parameters: dict = None,
1202
+ targets: Optional[list[DataTargetBase]] = None,
1203
+ parameters: Optional[dict] = None,
1204
1204
  infer_options: InferOptions = InferOptions.Null,
1205
1205
  overwrite=None,
1206
1206
  ):
@@ -178,17 +178,17 @@ class RunConfig:
178
178
  def __init__(
179
179
  self,
180
180
  function: typing.Union[str, FunctionReference, BaseRuntime] = None,
181
- local: bool = None,
182
- image: str = None,
183
- kind: str = None,
184
- handler: str = None,
185
- parameters: dict = None,
186
- watch: bool = None,
181
+ local: typing.Optional[bool] = None,
182
+ image: typing.Optional[str] = None,
183
+ kind: typing.Optional[str] = None,
184
+ handler: typing.Optional[str] = None,
185
+ parameters: typing.Optional[dict] = None,
186
+ watch: typing.Optional[bool] = None,
187
187
  owner=None,
188
188
  credentials: typing.Optional[mlrun.model.Credentials] = None,
189
- code: str = None,
190
- requirements: typing.Union[str, list[str]] = None,
191
- extra_spec: dict = None,
189
+ code: typing.Optional[str] = None,
190
+ requirements: typing.Optional[typing.Union[str, list[str]]] = None,
191
+ extra_spec: typing.Optional[dict] = None,
192
192
  auth_info=None,
193
193
  ):
194
194
  """class for holding function and run specs for jobs and serving functions
@@ -323,14 +323,14 @@ class FeatureSet(ModelObj):
323
323
 
324
324
  def __init__(
325
325
  self,
326
- name: str = None,
327
- description: str = None,
328
- entities: list[Union[Entity, str]] = None,
329
- timestamp_key: str = None,
330
- engine: str = None,
331
- label_column: str = None,
332
- relations: dict[str, Union[Entity, str]] = None,
333
- passthrough: bool = None,
326
+ name: Optional[str] = None,
327
+ description: Optional[str] = None,
328
+ entities: Optional[list[Union[Entity, str]]] = None,
329
+ timestamp_key: Optional[str] = None,
330
+ engine: Optional[str] = None,
331
+ label_column: Optional[str] = None,
332
+ relations: Optional[dict[str, Union[Entity, str]]] = None,
333
+ passthrough: Optional[bool] = None,
334
334
  ):
335
335
  """Feature set object, defines a set of features and their data pipeline
336
336
 
@@ -533,7 +533,9 @@ class FeatureSet(ModelObj):
533
533
  self, **(class_args if class_args is not None else {})
534
534
  )
535
535
 
536
- def purge_targets(self, target_names: list[str] = None, silent: bool = False):
536
+ def purge_targets(
537
+ self, target_names: Optional[list[str]] = None, silent: bool = False
538
+ ):
537
539
  """Delete data of specific targets
538
540
  :param target_names: List of names of targets to delete (default: delete all ingested targets)
539
541
  :param silent: Fail silently if target doesn't exist in featureset status"""
@@ -562,7 +564,7 @@ class FeatureSet(ModelObj):
562
564
  def update_targets_for_ingest(
563
565
  self,
564
566
  targets: list[DataTargetBase],
565
- overwrite: bool = None,
567
+ overwrite: Optional[bool] = None,
566
568
  ):
567
569
  if not targets:
568
570
  return
@@ -582,7 +584,7 @@ class FeatureSet(ModelObj):
582
584
  update_targets_run_id_for_ingest(overwrite, targets, status_targets)
583
585
 
584
586
  def _reload_and_get_status_targets(
585
- self, target_names: list[str] = None, silent: bool = False
587
+ self, target_names: Optional[list[str]] = None, silent: bool = False
586
588
  ):
587
589
  try:
588
590
  self.reload(update_spec=False)
@@ -619,7 +621,7 @@ class FeatureSet(ModelObj):
619
621
  self,
620
622
  name: str,
621
623
  value_type: mlrun.data_types.ValueType = None,
622
- description: str = None,
624
+ description: Optional[str] = None,
623
625
  labels: Optional[dict[str, str]] = None,
624
626
  ):
625
627
  """add/set an entity (dataset index)
@@ -1004,7 +1006,7 @@ class FeatureSet(ModelObj):
1004
1006
  def ingest(
1005
1007
  self,
1006
1008
  source=None,
1007
- targets: list[DataTargetBase] = None,
1009
+ targets: Optional[list[DataTargetBase]] = None,
1008
1010
  namespace=None,
1009
1011
  return_df: bool = True,
1010
1012
  infer_options: InferOptions = InferOptions.default(),
@@ -1073,11 +1075,11 @@ class FeatureSet(ModelObj):
1073
1075
  def preview(
1074
1076
  self,
1075
1077
  source,
1076
- entity_columns: list = None,
1078
+ entity_columns: Optional[list] = None,
1077
1079
  namespace=None,
1078
1080
  options: InferOptions = None,
1079
1081
  verbose: bool = False,
1080
- sample_size: int = None,
1082
+ sample_size: Optional[int] = None,
1081
1083
  ) -> pd.DataFrame:
1082
1084
  """run the ingestion pipeline with local DataFrame/file data and infer features schema and stats
1083
1085
 
@@ -1106,8 +1108,8 @@ class FeatureSet(ModelObj):
1106
1108
  def deploy_ingestion_service(
1107
1109
  self,
1108
1110
  source: DataSource = None,
1109
- targets: list[DataTargetBase] = None,
1110
- name: str = None,
1111
+ targets: Optional[list[DataTargetBase]] = None,
1112
+ name: Optional[str] = None,
1111
1113
  run_config: RunConfig = None,
1112
1114
  verbose=False,
1113
1115
  ) -> tuple[str, BaseRuntime]:
@@ -1143,7 +1145,7 @@ class FeatureSet(ModelObj):
1143
1145
  def extract_relation_keys(
1144
1146
  self,
1145
1147
  other_feature_set,
1146
- relations: dict[str, Union[str, Entity]] = None,
1148
+ relations: Optional[dict[str, Union[str, Entity]]] = None,
1147
1149
  ) -> list[str]:
1148
1150
  """
1149
1151
  Checks whether a feature set can be merged to the right of this feature set.
@@ -212,7 +212,7 @@ class JoinGraph(ModelObj):
212
212
 
213
213
  def __init__(
214
214
  self,
215
- name: str = None,
215
+ name: typing.Optional[str] = None,
216
216
  first_feature_set: Union[str, FeatureSet] = None,
217
217
  ):
218
218
  """
@@ -318,7 +318,10 @@ class JoinGraph(ModelObj):
318
318
  return self._join_operands(other_operand, JoinGraph.first_join_type)
319
319
 
320
320
  def _init_all_join_keys(
321
- self, feature_set_objects, vector, entity_rows_keys: list[str] = None
321
+ self,
322
+ feature_set_objects,
323
+ vector,
324
+ entity_rows_keys: typing.Optional[list[str]] = None,
322
325
  ):
323
326
  for step in self.steps:
324
327
  step.init_join_keys(feature_set_objects, vector, entity_rows_keys)
@@ -371,11 +374,11 @@ class JoinGraph(ModelObj):
371
374
  class _JoinStep(ModelObj):
372
375
  def __init__(
373
376
  self,
374
- name: str = None,
375
- left_step_name: str = None,
376
- right_step_name: str = None,
377
- left_feature_set_names: Union[str, list[str]] = None,
378
- right_feature_set_name: str = None,
377
+ name: typing.Optional[str] = None,
378
+ left_step_name: typing.Optional[str] = None,
379
+ right_step_name: typing.Optional[str] = None,
380
+ left_feature_set_names: typing.Optional[Union[str, list[str]]] = None,
381
+ right_feature_set_name: typing.Optional[str] = None,
379
382
  join_type: str = "inner",
380
383
  asof_join: bool = False,
381
384
  ):
@@ -399,7 +402,7 @@ class _JoinStep(ModelObj):
399
402
  self,
400
403
  feature_set_objects: ObjectList,
401
404
  vector,
402
- entity_rows_keys: list[str] = None,
405
+ entity_rows_keys: typing.Optional[list[str]] = None,
403
406
  ):
404
407
  if feature_set_objects[self.right_feature_set_name].is_connectable_to_df(
405
408
  entity_rows_keys
@@ -479,7 +482,7 @@ class FeatureVector(ModelObj):
479
482
  description=None,
480
483
  with_indexes=None,
481
484
  join_graph: JoinGraph = None,
482
- relations: dict[str, dict[str, Union[Entity, str]]] = None,
485
+ relations: typing.Optional[dict[str, dict[str, Union[Entity, str]]]] = None,
483
486
  ):
484
487
  """Feature vector, specify selected features, their metadata and material views
485
488
 
@@ -727,21 +730,21 @@ class FeatureVector(ModelObj):
727
730
  def get_offline_features(
728
731
  self,
729
732
  entity_rows=None,
730
- entity_timestamp_column: str = None,
733
+ entity_timestamp_column: typing.Optional[str] = None,
731
734
  target: DataTargetBase = None,
732
735
  run_config: RunConfig = None,
733
- drop_columns: list[str] = None,
734
- start_time: Union[str, datetime] = None,
735
- end_time: Union[str, datetime] = None,
736
+ drop_columns: typing.Optional[list[str]] = None,
737
+ start_time: typing.Optional[Union[str, datetime]] = None,
738
+ end_time: typing.Optional[Union[str, datetime]] = None,
736
739
  with_indexes: bool = False,
737
740
  update_stats: bool = False,
738
- engine: str = None,
739
- engine_args: dict = None,
740
- query: str = None,
741
- order_by: Union[str, list[str]] = None,
742
- spark_service: str = None,
743
- timestamp_for_filtering: Union[str, dict[str, str]] = None,
744
- additional_filters: list = None,
741
+ engine: typing.Optional[str] = None,
742
+ engine_args: typing.Optional[dict] = None,
743
+ query: typing.Optional[str] = None,
744
+ order_by: typing.Optional[Union[str, list[str]]] = None,
745
+ spark_service: typing.Optional[str] = None,
746
+ timestamp_for_filtering: typing.Optional[Union[str, dict[str, str]]] = None,
747
+ additional_filters: typing.Optional[list] = None,
745
748
  ):
746
749
  """retrieve offline feature vector results
747
750
 
@@ -831,9 +834,9 @@ class FeatureVector(ModelObj):
831
834
  self,
832
835
  run_config: RunConfig = None,
833
836
  fixed_window_type: FixedWindowType = FixedWindowType.LastClosedWindow,
834
- impute_policy: dict = None,
837
+ impute_policy: typing.Optional[dict] = None,
835
838
  update_stats: bool = False,
836
- entity_keys: list[str] = None,
839
+ entity_keys: typing.Optional[list[str]] = None,
837
840
  ):
838
841
  """initialize and return online feature vector service api,
839
842
  returns :py:class:`~mlrun.feature_store.OnlineVectorService`
@@ -915,8 +918,8 @@ class OnlineVectorService:
915
918
  vector,
916
919
  graph,
917
920
  index_columns,
918
- impute_policy: dict = None,
919
- requested_columns: list[str] = None,
921
+ impute_policy: typing.Optional[dict] = None,
922
+ requested_columns: typing.Optional[list[str]] = None,
920
923
  ):
921
924
  self.vector = vector
922
925
  self.impute_policy = impute_policy or {}
@@ -64,7 +64,12 @@ class BaseMerger(abc.ABC):
64
64
  if self._drop_indexes:
65
65
  self._append_drop_column(key)
66
66
 
67
- def _update_alias(self, key: str = None, val: str = None, dictionary: dict = None):
67
+ def _update_alias(
68
+ self,
69
+ key: typing.Optional[str] = None,
70
+ val: typing.Optional[str] = None,
71
+ dictionary: typing.Optional[dict] = None,
72
+ ):
68
73
  if dictionary is not None:
69
74
  # adding dictionary to alias
70
75
  self._alias.update(dictionary)
@@ -414,8 +419,8 @@ class BaseMerger(abc.ABC):
414
419
  entity_timestamp_column: str,
415
420
  featuresets: list,
416
421
  featureset_dfs: list,
417
- keys: list = None,
418
- join_types: list = None,
422
+ keys: typing.Optional[list] = None,
423
+ join_types: typing.Optional[list] = None,
419
424
  ):
420
425
  """join the entities and feature set features into a result dataframe"""
421
426
 
@@ -544,8 +549,8 @@ class BaseMerger(abc.ABC):
544
549
  self,
545
550
  name: str,
546
551
  order: int,
547
- left_keys: list[str] = None,
548
- right_keys: list[str] = None,
552
+ left_keys: typing.Optional[list[str]] = None,
553
+ right_keys: typing.Optional[list[str]] = None,
549
554
  ):
550
555
  self.name = name
551
556
  self.left_keys = left_keys if left_keys is not None else []
@@ -755,9 +760,9 @@ class BaseMerger(abc.ABC):
755
760
  self,
756
761
  feature_set: FeatureSet,
757
762
  feature_set_name: list[str],
758
- column_names: list[str] = None,
759
- start_time: typing.Union[str, datetime] = None,
760
- end_time: typing.Union[str, datetime] = None,
763
+ column_names: typing.Optional[list[str]] = None,
764
+ start_time: typing.Optional[typing.Union[str, datetime]] = None,
765
+ end_time: typing.Optional[typing.Union[str, datetime]] = None,
761
766
  time_column: typing.Optional[str] = None,
762
767
  additional_filters=None,
763
768
  ):
@@ -779,7 +784,7 @@ class BaseMerger(abc.ABC):
779
784
  self,
780
785
  df,
781
786
  rename_col_dict: dict[str, str],
782
- columns: list[str] = None,
787
+ columns: typing.Optional[list[str]] = None,
783
788
  ):
784
789
  """
785
790
  rename the columns of the df according to rename_col_dict, and select only `columns` if it is not none
@@ -13,6 +13,7 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  import uuid
16
+ from typing import Optional
16
17
 
17
18
  import mlrun
18
19
  import mlrun.common.constants as mlrun_constants
@@ -32,7 +33,7 @@ def run_merge_job(
32
33
  merger: BaseMerger,
33
34
  engine: str,
34
35
  engine_args: dict,
35
- spark_service: str = None,
36
+ spark_service: Optional[str] = None,
36
37
  entity_rows=None,
37
38
  entity_timestamp_column=None,
38
39
  run_config=None,
@@ -379,7 +379,7 @@ class Imputer(StepToDict, MLRunStep):
379
379
  self,
380
380
  method: str = "avg",
381
381
  default_value=None,
382
- mapping: dict[str, Any] = None,
382
+ mapping: Optional[dict[str, Any]] = None,
383
383
  **kwargs,
384
384
  ):
385
385
  """Replace None values with default values
@@ -517,7 +517,7 @@ class DateExtractor(StepToDict, MLRunStep):
517
517
  def __init__(
518
518
  self,
519
519
  parts: Union[dict[str, str], list[str]],
520
- timestamp_col: str = None,
520
+ timestamp_col: Optional[str] = None,
521
521
  **kwargs,
522
522
  ):
523
523
  """Date Extractor extracts a date-time component into new columns
mlrun/features.py CHANGED
@@ -41,9 +41,9 @@ class Entity(ModelObj):
41
41
 
42
42
  def __init__(
43
43
  self,
44
- name: str = None,
44
+ name: Optional[str] = None,
45
45
  value_type: Union[ValueType, str] = None,
46
- description: str = None,
46
+ description: Optional[str] = None,
47
47
  labels: Optional[dict[str, str]] = None,
48
48
  ):
49
49
  """data entity (index key)
@@ -80,13 +80,13 @@ class Feature(ModelObj):
80
80
  def __init__(
81
81
  self,
82
82
  value_type: Union[ValueType, str] = None,
83
- dims: list[int] = None,
84
- description: str = None,
85
- aggregate: bool = None,
86
- name: str = None,
83
+ dims: Optional[list[int]] = None,
84
+ description: Optional[str] = None,
85
+ aggregate: Optional[bool] = None,
86
+ name: Optional[str] = None,
87
87
  validator=None,
88
- default: str = None,
89
- labels: dict[str, str] = None,
88
+ default: Optional[str] = None,
89
+ labels: Optional[dict[str, str]] = None,
90
90
  ):
91
91
  """data feature
92
92
 
@@ -231,7 +231,9 @@ class Validator(ModelObj):
231
231
  kind = ""
232
232
  _dict_fields = ["kind", "check_type", "severity"]
233
233
 
234
- def __init__(self, check_type: bool = None, severity: str = None):
234
+ def __init__(
235
+ self, check_type: Optional[bool] = None, severity: Optional[str] = None
236
+ ):
235
237
  """Base validator
236
238
 
237
239
  example::
@@ -268,7 +270,11 @@ class MinMaxValidator(Validator):
268
270
  _dict_fields = Validator._dict_fields + ["min", "max"]
269
271
 
270
272
  def __init__(
271
- self, check_type: bool = None, severity: str = None, min=None, max=None
273
+ self,
274
+ check_type: Optional[bool] = None,
275
+ severity: Optional[str] = None,
276
+ min=None,
277
+ max=None,
272
278
  ):
273
279
  """Validate min/max value ranges
274
280
 
@@ -328,7 +334,11 @@ class MinMaxLenValidator(Validator):
328
334
  _dict_fields = Validator._dict_fields + ["min", "max"]
329
335
 
330
336
  def __init__(
331
- self, check_type: bool = None, severity: str = None, min=None, max=None
337
+ self,
338
+ check_type: Optional[bool] = None,
339
+ severity: Optional[str] = None,
340
+ min=None,
341
+ max=None,
332
342
  ):
333
343
  """Validate min/max length value ranges
334
344
 
@@ -390,7 +400,12 @@ class RegexValidator(Validator):
390
400
  kind = "regex"
391
401
  _dict_fields = Validator._dict_fields + ["regex"]
392
402
 
393
- def __init__(self, check_type: bool = None, severity: str = None, regex=None):
403
+ def __init__(
404
+ self,
405
+ check_type: Optional[bool] = None,
406
+ severity: Optional[str] = None,
407
+ regex=None,
408
+ ):
394
409
  """Validate value based on regular expression
395
410
 
396
411
  example::
@@ -434,7 +449,9 @@ class RegexValidator(Validator):
434
449
  return ok, args
435
450
 
436
451
  @classmethod
437
- def from_dict(cls, struct=None, fields=None, deprecated_fields: dict = None):
452
+ def from_dict(
453
+ cls, struct=None, fields=None, deprecated_fields: Optional[dict] = None
454
+ ):
438
455
  new_obj = super().from_dict(
439
456
  struct=struct, fields=fields, deprecated_fields=deprecated_fields
440
457
  )
@@ -11,6 +11,5 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
- #
15
- # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
14
+
16
15
  from .parallel_coordinates import compare_db_runs, compare_run_objects
@@ -11,8 +11,7 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
- #
15
- # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
14
+
16
15
  from .artifacts_library import ArtifactsLibrary
17
16
  from .mlrun_interface import MLRunInterface
18
17
  from .model_handler import ModelHandler, with_mlrun_interface, without_mlrun_interface
@@ -13,7 +13,7 @@
13
13
  # limitations under the License.
14
14
  #
15
15
  from abc import ABC, abstractmethod
16
- from typing import Union
16
+ from typing import Optional, Union
17
17
 
18
18
  import mlrun
19
19
 
@@ -39,7 +39,7 @@ class ArtifactsLibrary(ABC):
39
39
  @classmethod
40
40
  def get_plans(
41
41
  cls,
42
- artifacts: Union[list[Plan], dict[str, dict], list[str]] = None,
42
+ artifacts: Optional[Union[list[Plan], dict[str, dict], list[str]]] = None,
43
43
  context: mlrun.MLClientCtx = None,
44
44
  include_default: bool = True,
45
45
  # custom_plans: dict = None, :param custom_plans: Custom user plans objects to initialize from.