mlrun 1.7.0rc6__py3-none-any.whl → 1.7.0rc9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Click here for more details.

Files changed (84)
  1. mlrun/__main__.py +2 -0
  2. mlrun/common/constants.py +6 -0
  3. mlrun/common/schemas/__init__.py +5 -0
  4. mlrun/common/schemas/api_gateway.py +8 -1
  5. mlrun/common/schemas/hub.py +7 -9
  6. mlrun/common/schemas/model_monitoring/__init__.py +4 -0
  7. mlrun/common/schemas/model_monitoring/constants.py +36 -19
  8. mlrun/{model_monitoring/stores/models/__init__.py → common/schemas/pagination.py} +9 -10
  9. mlrun/common/schemas/project.py +16 -10
  10. mlrun/common/types.py +7 -1
  11. mlrun/config.py +35 -10
  12. mlrun/data_types/data_types.py +4 -0
  13. mlrun/datastore/__init__.py +3 -7
  14. mlrun/datastore/alibaba_oss.py +130 -0
  15. mlrun/datastore/azure_blob.py +4 -5
  16. mlrun/datastore/base.py +22 -16
  17. mlrun/datastore/datastore.py +4 -0
  18. mlrun/datastore/datastore_profile.py +19 -1
  19. mlrun/datastore/google_cloud_storage.py +1 -1
  20. mlrun/datastore/snowflake_utils.py +43 -0
  21. mlrun/datastore/sources.py +11 -29
  22. mlrun/datastore/targets.py +131 -11
  23. mlrun/datastore/utils.py +10 -5
  24. mlrun/db/base.py +58 -6
  25. mlrun/db/httpdb.py +183 -77
  26. mlrun/db/nopdb.py +110 -0
  27. mlrun/feature_store/api.py +3 -2
  28. mlrun/feature_store/retrieval/spark_merger.py +27 -23
  29. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +1 -1
  30. mlrun/frameworks/tf_keras/mlrun_interface.py +2 -2
  31. mlrun/kfpops.py +2 -5
  32. mlrun/launcher/base.py +1 -1
  33. mlrun/launcher/client.py +2 -2
  34. mlrun/model.py +1 -0
  35. mlrun/model_monitoring/__init__.py +1 -1
  36. mlrun/model_monitoring/api.py +104 -295
  37. mlrun/model_monitoring/controller.py +25 -25
  38. mlrun/model_monitoring/db/__init__.py +16 -0
  39. mlrun/model_monitoring/{stores → db/stores}/__init__.py +43 -34
  40. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  41. mlrun/model_monitoring/{stores/model_endpoint_store.py → db/stores/base/store.py} +47 -6
  42. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  43. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +49 -0
  44. mlrun/model_monitoring/{stores → db/stores/sqldb}/models/base.py +76 -3
  45. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +68 -0
  46. mlrun/model_monitoring/{stores → db/stores/sqldb}/models/sqlite.py +13 -1
  47. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +662 -0
  48. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  49. mlrun/model_monitoring/{stores/kv_model_endpoint_store.py → db/stores/v3io_kv/kv_store.py} +134 -3
  50. mlrun/model_monitoring/helpers.py +3 -3
  51. mlrun/model_monitoring/stream_processing.py +41 -9
  52. mlrun/model_monitoring/tracking_policy.py +7 -1
  53. mlrun/model_monitoring/writer.py +4 -36
  54. mlrun/projects/pipelines.py +14 -2
  55. mlrun/projects/project.py +141 -122
  56. mlrun/run.py +8 -2
  57. mlrun/runtimes/__init__.py +16 -0
  58. mlrun/runtimes/base.py +10 -1
  59. mlrun/runtimes/kubejob.py +26 -121
  60. mlrun/runtimes/nuclio/api_gateway.py +243 -66
  61. mlrun/runtimes/nuclio/application/application.py +79 -1
  62. mlrun/runtimes/nuclio/application/reverse_proxy.go +9 -1
  63. mlrun/runtimes/nuclio/function.py +14 -8
  64. mlrun/runtimes/nuclio/serving.py +30 -34
  65. mlrun/runtimes/pod.py +171 -0
  66. mlrun/runtimes/utils.py +0 -28
  67. mlrun/serving/remote.py +2 -3
  68. mlrun/serving/routers.py +4 -3
  69. mlrun/serving/server.py +5 -7
  70. mlrun/serving/states.py +40 -23
  71. mlrun/serving/v2_serving.py +4 -3
  72. mlrun/utils/helpers.py +34 -0
  73. mlrun/utils/http.py +1 -1
  74. mlrun/utils/retryer.py +1 -0
  75. mlrun/utils/version/version.json +2 -2
  76. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc9.dist-info}/METADATA +25 -16
  77. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc9.dist-info}/RECORD +81 -75
  78. mlrun/model_monitoring/batch.py +0 -933
  79. mlrun/model_monitoring/stores/models/mysql.py +0 -34
  80. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -382
  81. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc9.dist-info}/LICENSE +0 -0
  82. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc9.dist-info}/WHEEL +0 -0
  83. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc9.dist-info}/entry_points.txt +0 -0
  84. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc9.dist-info}/top_level.txt +0 -0
@@ -24,6 +24,32 @@ from .base import BaseMerger
24
24
  from .conversion import PandasConversionMixin
25
25
 
26
26
 
27
+ def spark_df_to_pandas(spark_df):
28
+ # as of pyspark 3.2.3, toPandas fails to convert timestamps unless we work around the issue
29
+ # when we upgrade pyspark, we should check whether this workaround is still necessary
30
+ # see https://stackoverflow.com/questions/76389694/transforming-pyspark-to-pandas-dataframe
31
+ if semver.parse(pd.__version__)["major"] >= 2:
32
+ import pyspark.sql.functions as pyspark_functions
33
+
34
+ type_conversion_dict = {}
35
+ for field in spark_df.schema.fields:
36
+ if str(field.dataType) == "TimestampType":
37
+ spark_df = spark_df.withColumn(
38
+ field.name,
39
+ pyspark_functions.date_format(
40
+ pyspark_functions.to_timestamp(field.name),
41
+ "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS",
42
+ ),
43
+ )
44
+ type_conversion_dict[field.name] = "datetime64[ns]"
45
+ df = PandasConversionMixin.toPandas(spark_df)
46
+ if type_conversion_dict:
47
+ df = df.astype(type_conversion_dict)
48
+ return df
49
+ else:
50
+ return PandasConversionMixin.toPandas(spark_df)
51
+
52
+
27
53
  class SparkFeatureMerger(BaseMerger):
28
54
  engine = "spark"
29
55
  support_offline = True
@@ -166,29 +192,7 @@ class SparkFeatureMerger(BaseMerger):
166
192
  def get_df(self, to_pandas=True):
167
193
  if to_pandas:
168
194
  if self._pandas_df is None:
169
- df = self._result_df
170
- # as of pyspark 3.2.3, toPandas fails to convert timestamps unless we work around the issue
171
- # when we upgrade pyspark, we should check whether this workaround is still necessary
172
- # see https://stackoverflow.com/questions/76389694/transforming-pyspark-to-pandas-dataframe
173
- if semver.parse(pd.__version__)["major"] >= 2:
174
- import pyspark.sql.functions as pyspark_functions
175
-
176
- type_conversion_dict = {}
177
- for field in df.schema.fields:
178
- if str(field.dataType) == "TimestampType":
179
- df = df.withColumn(
180
- field.name,
181
- pyspark_functions.date_format(
182
- pyspark_functions.to_timestamp(field.name),
183
- "yyyy-MM-dd'T'HH:mm:ss.SSSSSSSSS",
184
- ),
185
- )
186
- type_conversion_dict[field.name] = "datetime64[ns]"
187
- df = PandasConversionMixin.toPandas(df)
188
- if type_conversion_dict:
189
- df = df.astype(type_conversion_dict)
190
- else:
191
- df = PandasConversionMixin.toPandas(df)
195
+ df = spark_df_to_pandas(self._result_df)
192
196
  self._pandas_df = df
193
197
  self._set_indexes(self._pandas_df)
194
198
  return self._pandas_df
@@ -17,7 +17,7 @@ from typing import Callable, Union
17
17
  import numpy as np
18
18
  import tensorflow as tf
19
19
  from tensorflow import Tensor, Variable
20
- from tensorflow.keras.callbacks import Callback
20
+ from tensorflow.python.keras.callbacks import Callback
21
21
 
22
22
  import mlrun
23
23
 
@@ -19,7 +19,8 @@ from typing import Union
19
19
 
20
20
  import tensorflow as tf
21
21
  from tensorflow import keras
22
- from tensorflow.keras.callbacks import (
22
+ from tensorflow.keras.optimizers import Optimizer
23
+ from tensorflow.python.keras.callbacks import (
23
24
  BaseLogger,
24
25
  Callback,
25
26
  CSVLogger,
@@ -27,7 +28,6 @@ from tensorflow.keras.callbacks import (
27
28
  ProgbarLogger,
28
29
  TensorBoard,
29
30
  )
30
- from tensorflow.keras.optimizers import Optimizer
31
31
 
32
32
  import mlrun
33
33
 
mlrun/kfpops.py CHANGED
@@ -103,7 +103,7 @@ def write_kfpmeta(struct):
103
103
  with open(path, "w") as fp:
104
104
  fp.write(str(val))
105
105
  except Exception as exc:
106
- logger.warning("Failed writing to temp file. Ignoring", exc=repr(exc))
106
+ logger.warning("Failed writing to temp file. Ignoring", exc=err_to_str(exc))
107
107
  pass
108
108
 
109
109
  text = "# Run Report\n"
@@ -112,10 +112,7 @@ def write_kfpmeta(struct):
112
112
 
113
113
  text += "## Metadata\n```yaml\n" + dict_to_yaml(struct) + "```\n"
114
114
 
115
- metadata = {
116
- "outputs": output_artifacts
117
- + [{"type": "markdown", "storage": "inline", "source": text}]
118
- }
115
+ metadata = {"outputs": [{"type": "markdown", "storage": "inline", "source": text}]}
119
116
  with open(os.path.join(KFPMETA_DIR, "mlpipeline-ui-metadata.json"), "w") as f:
120
117
  json.dump(metadata, f)
121
118
 
mlrun/launcher/base.py CHANGED
@@ -353,7 +353,7 @@ class BaseLauncher(abc.ABC):
353
353
  or {}
354
354
  )
355
355
  state_thresholds = (
356
- mlrun.config.config.function.spec.state_thresholds.default.to_dict()
356
+ mlrun.mlconf.function.spec.state_thresholds.default.to_dict()
357
357
  | state_thresholds
358
358
  )
359
359
  run.spec.state_thresholds = state_thresholds or run.spec.state_thresholds
mlrun/launcher/client.py CHANGED
@@ -47,7 +47,7 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
47
47
  If build is needed, set the image as the base_image for the build.
48
48
  If image is not given set the default one.
49
49
  """
50
- if runtime.kind in mlrun.runtimes.RuntimeKinds.nuclio_runtimes():
50
+ if runtime.kind in mlrun.runtimes.RuntimeKinds.pure_nuclio_deployed_runtimes():
51
51
  return
52
52
 
53
53
  require_build = runtime.requires_build()
@@ -129,7 +129,7 @@ class ClientBaseLauncher(launcher.BaseLauncher, abc.ABC):
129
129
  logger.info("no returned result (job may still be in progress)")
130
130
  results_tbl.append(run.to_dict())
131
131
 
132
- if mlrun.utils.is_ipython and mlrun.config.config.ipython_widget:
132
+ if mlrun.utils.is_ipython and mlrun.mlconf.ipython_widget:
133
133
  results_tbl.show()
134
134
  print()
135
135
  ui_url = mlrun.utils.get_ui_url(project, uid)
mlrun/model.py CHANGED
@@ -71,6 +71,7 @@ class ModelObj:
71
71
  return new_type.from_dict(param)
72
72
  return param
73
73
 
74
+ @mlrun.utils.filter_warnings("ignore", FutureWarning)
74
75
  def to_dict(
75
76
  self, fields: list = None, exclude: list = None, strip: bool = False
76
77
  ) -> dict:
@@ -15,7 +15,7 @@
15
15
  # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
16
16
  # for backwards compatibility
17
17
 
18
+ from .db import get_store_object
18
19
  from .helpers import get_stream_path
19
20
  from .model_endpoint import ModelEndpoint
20
- from .stores import ModelEndpointStore, ModelEndpointStoreType, get_model_endpoint_store
21
21
  from .tracking_policy import TrackingPolicy