mlrun-1.8.0rc9-py3-none-any.whl → mlrun-1.8.0rc10-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of mlrun might be problematic.

mlrun/artifacts/__init__.py CHANGED
@@ -13,7 +13,7 @@
  # limitations under the License.

  # Don't remove this, used by sphinx documentation
- __all__ = ["get_model", "update_model"]
+ __all__ = ["get_model", "update_model", "DocumentLoaderSpec", "MLRunLoader"]

  from .base import (
      Artifact,
mlrun/artifacts/document.py CHANGED
@@ -39,8 +39,6 @@ class DocumentLoaderSpec(ModelObj):
      src_name (str): The name of the source attribute to pass to the loader class.
      kwargs (Optional[dict]): Additional keyword arguments to pass to the loader class.

-     Methods:
-         make_loader(src_path): Creates an instance of the loader class with the specified source path.
  """

  _dict_fields = ["loader_class_name", "src_name", "kwargs"]
@@ -58,6 +56,19 @@ class DocumentLoaderSpec(ModelObj):
      loader_class_name (str): The name of the loader class to use.
      src_name (str): The source name for the document.
      kwargs (Optional[dict]): Additional keyword arguments to pass to the loader class.
+
+     Example:
+         >>> # Create a loader specification for PDF documents
+         >>> loader_spec = DocumentLoaderSpec(
+         ...     loader_class_name="langchain_community.document_loaders.PDFLoader",
+         ...     src_name="file_path",
+         ...     kwargs={"extract_images": True},
+         ... )
+         >>> # Create a loader instance for a specific PDF file
+         >>> pdf_loader = loader_spec.make_loader("/path/to/document.pdf")
+         >>> # Load the documents
+         >>> documents = pdf_loader.load()
+
  """
  self.loader_class_name = loader_class_name
  self.src_name = src_name
@@ -87,6 +98,45 @@ class MLRunLoader:

      Returns:
          DynamicDocumentLoader: An instance of a dynamically defined subclass of BaseLoader.
+
+     Example:
+         >>> # Create a document loader specification
+         >>> loader_spec = DocumentLoaderSpec(
+         ...     loader_class_name="langchain_community.document_loaders.TextLoader",
+         ...     src_name="file_path",
+         ... )
+         >>> # Create a basic loader for a single file
+         >>> loader = MLRunLoader(
+         ...     source_path="/path/to/document.txt",
+         ...     loader_spec=loader_spec,
+         ...     artifact_key="my_doc",
+         ...     producer=project,
+         ...     upload=True,
+         ... )
+         >>> documents = loader.load()
+         >>> # Create a loader with auto-generated keys
+         >>> loader = MLRunLoader(
+         ...     source_path="/path/to/document.txt",
+         ...     loader_spec=loader_spec,
+         ...     artifact_key="doc%%",  # %% will be replaced with encoded path
+         ...     producer=project,
+         ... )
+         >>> documents = loader.load()
+         >>> # Use with DirectoryLoader
+         >>> from langchain_community.document_loaders import DirectoryLoader
+         >>> dir_loader = DirectoryLoader(
+         ...     "/path/to/directory",
+         ...     glob="**/*.txt",
+         ...     loader_cls=MLRunLoader,
+         ...     loader_kwargs={
+         ...         "loader_spec": loader_spec,
+         ...         "artifact_key": "doc%%",
+         ...         "producer": project,
+         ...         "upload": True,
+         ...     },
+         ... )
+         >>> documents = dir_loader.load()
+
  """

  def __new__(
@@ -178,11 +228,6 @@ class MLRunLoader:
  class DocumentArtifact(Artifact):
      """
      A specific artifact class inheriting from generic artifact, used to maintain Document meta-data.
-
-     Methods:
-         to_langchain_documents(splitter): Create LC documents from the artifact.
-         collection_add(collection_id): Add a collection ID to the artifact.
-         collection_remove(collection_id): Remove a collection ID from the artifact.
      """

      class DocumentArtifactSpec(ArtifactSpec):
@@ -205,10 +250,6 @@ class DocumentArtifact(Artifact):
          self.collections = collections if collections is not None else {}
          self.original_source = original_source

-     """
-     A specific artifact class inheriting from generic artifact, used to maintain Document meta-data.
-     """
-
      kind = "document"

      METADATA_SOURCE_KEY = "source"
@@ -242,6 +283,7 @@ class DocumentArtifact(Artifact):
          )

      def get_source(self):
+         """Get the source URI for this artifact."""
          return generate_artifact_uri(self.metadata.project, self.spec.db_key)

      def to_langchain_documents(
mlrun/common/schemas/model_monitoring/constants.py CHANGED
@@ -44,6 +44,7 @@ class ModelEndpointSchema(MonitoringStrEnum):
      FUNCTION_TAG = "function_tag"
      FUNCTION_UID = "function_uid"
      MODEL_NAME = "model_name"
+     MODEL_DB_KEY = "model_db_key"
      MODEL_TAG = "model_tag"
      MODEL_CLASS = "model_class"
      MODEL_UID = "model_uid"
mlrun/common/schemas/model_monitoring/model_endpoints.py CHANGED
@@ -121,6 +121,7 @@ class ModelEndpointMetadata(ObjectMetadata, ModelEndpointParser):
  class ModelEndpointSpec(ObjectSpec, ModelEndpointParser):
      model_uid: Optional[str] = ""
      model_name: Optional[str] = ""
+     model_db_key: Optional[str] = ""
      model_tag: Optional[str] = ""
      model_class: Optional[str] = ""
      function_name: Optional[str] = ""
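A minimal sketch of how the new field sits next to the other model identifiers on the endpoint spec. The field names come from the diff above; the values are hypothetical:

    import mlrun.common.schemas

    spec = mlrun.common.schemas.ModelEndpointSpec(
        model_name="my-model",           # hypothetical artifact key
        model_db_key="my-model-db-key",  # hypothetical db_key of the model artifact
        model_uid="abc123",
        model_tag="latest",
    )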
mlrun/config.py CHANGED
@@ -160,6 +160,7 @@ default_config = {
      # migration from artifacts to artifacts_v2 is done in batches, and requires a state file to keep track of the
      # migration progress.
      "artifact_migration_batch_size": 200,
+     "artifact_migration_v9_batch_size": 30000,
      "artifact_migration_state_file_path": "./db/_artifact_migration_state.json",
      "datasets": {
          "max_preview_columns": 100,
mlrun/data_types/data_types.py CHANGED
@@ -124,6 +124,7 @@ def spark_to_value_type(data_type):
      "double": ValueType.DOUBLE,
      "boolean": ValueType.BOOL,
      "timestamp": ValueType.DATETIME,
+     "timestamp_ntz": ValueType.DATETIME,
      "string": ValueType.STRING,
      "array": "list",
      "map": "dict",
mlrun/data_types/spark.py CHANGED
@@ -19,7 +19,7 @@ from typing import Optional
  import numpy as np
  import pytz
  from pyspark.sql.functions import to_utc_timestamp
- from pyspark.sql.types import BooleanType, DoubleType, TimestampType
+ from pyspark.sql.types import BooleanType, DoubleType

  from mlrun.feature_store.retrieval.spark_merger import spark_df_to_pandas
  from mlrun.utils import logger
@@ -144,7 +144,8 @@ def get_df_stats_spark(df, options, num_bins=20, sample_size=None):
      timestamp_columns = set()
      boolean_columns = set()
      for field in df_after_type_casts.schema.fields:
-         is_timestamp = isinstance(field.dataType, TimestampType)
+         # covers TimestampType and TimestampNTZType, which was added in PySpark 3.4.0
+         is_timestamp = field.dataType.typeName().startswith("timestamp")
          is_boolean = isinstance(field.dataType, BooleanType)
          if is_timestamp:
              df_after_type_casts = df_after_type_casts.withColumn(
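Why the string check works: every Spark timestamp variant's typeName() begins with "timestamp", so the new test covers both the timezone-aware type and the NTZ type added in PySpark 3.4 without importing either class. A quick sanity check (assumes pyspark >= 3.4 is installed):

    from pyspark.sql.types import TimestampNTZType, TimestampType

    for data_type in (TimestampType(), TimestampNTZType()):
        # typeName() returns "timestamp" and "timestamp_ntz" respectively
        assert data_type.typeName().startswith("timestamp")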
mlrun/data_types/to_pandas.py CHANGED
@@ -244,6 +244,15 @@ def _to_corrected_pandas_type(dt):


  def spark_df_to_pandas(spark_df):
+     import pyspark
+
+     if semver.parse(pyspark.__version__) >= semver.Version(3, 5, 0):
+
+         def to_pandas(spark_df_inner):
+             return spark_df_inner.toPandas()
+     else:
+         to_pandas = _to_pandas
+
      # as of pyspark 3.2.3, toPandas fails to convert timestamps unless we work around the issue
      # when we upgrade pyspark, we should check whether this workaround is still necessary
      # see https://stackoverflow.com/questions/76389694/transforming-pyspark-to-pandas-dataframe
@@ -262,9 +271,9 @@ def spark_df_to_pandas(spark_df):
              )
              type_conversion_dict[field.name] = "datetime64[ns]"

-         df = to_pandas(spark_df)
+         df = to_pandas(spark_df)
          if type_conversion_dict:
              df = df.astype(type_conversion_dict)
          return df
      else:
-         return _to_pandas(spark_df)
+         return to_pandas(spark_df)
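The net effect: on PySpark 3.5 and later the built-in toPandas() is used directly, while older versions keep the vendored _to_pandas workaround. A minimal sketch of the same gating idea, using packaging.version instead of the semver dependency the module uses (the helper name pick_converter is hypothetical):

    from packaging.version import Version

    import pyspark

    def pick_converter(fallback):
        # prefer the native conversion on PySpark >= 3.5, otherwise use the fallback
        if Version(pyspark.__version__) >= Version("3.5.0"):
            return lambda spark_df: spark_df.toPandas()
        return fallback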
mlrun/datastore/__init__.py CHANGED
@@ -30,6 +30,8 @@ __all__ = [
      "DatabricksFileSystemDisableCache",
      "DatabricksFileBugFixed",
      "get_stream_pusher",
+     "ConfigProfile",
+     "VectorStoreCollection",
  ]

  import fsspec
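With the expanded __all__, the two names become part of the package's public surface; assuming they are imported in the package __init__ (which the __all__ change implies), they can be pulled in directly:

    from mlrun.datastore import ConfigProfile, VectorStoreCollection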
mlrun/datastore/targets.py CHANGED
@@ -1136,7 +1136,8 @@ class CSVTarget(BaseStoreTarget):
          import pyspark.sql.functions as funcs

          for col_name, col_type in df.dtypes:
-             if col_type == "timestamp":
+             # covers TimestampType and TimestampNTZType, which was added in PySpark 3.4.0
+             if col_type.startswith("timestamp"):
                  # df.write.csv saves timestamps with millisecond precision, but we want microsecond precision
                  # for compatibility with storey.
                  df = df.withColumn(
mlrun/datastore/vectorstore.py CHANGED
@@ -123,23 +123,29 @@ class VectorStoreCollection:
        Converts artifacts to LangChain documents, adds them to the vector store, and
        updates the MLRun context. If documents are split, the IDs are handled appropriately.

-       Args:
-           artifacts (list[DocumentArtifact]): List of DocumentArtifact objects to add
-           splitter (optional): Document splitter to break artifacts into smaller chunks.
-               If None, each artifact becomes a single document.
-           **kwargs: Additional arguments passed to the underlying add_documents method.
-               Special handling for 'ids' kwarg:
-               - If provided and document is split, IDs are generated as "{original_id}_{i}"
-                 where i starts from 1 (e.g., "doc1_1", "doc1_2", etc.)
-               - If provided and document isn't split, original IDs are used as-is
+       :param artifacts: List of DocumentArtifact objects to add
+       :type artifacts: list[DocumentArtifact]
+       :param splitter: Document splitter to break artifacts into smaller chunks.
+           If None, each artifact becomes a single document.
+       :type splitter: TextSplitter, optional
+       :param kwargs: Additional arguments passed to the underlying add_documents method.
+           Special handling for 'ids' kwarg:
+
+           * If provided and document is split, IDs are generated as "{original_id}_{i}"
+             where i starts from 1 (e.g., "doc1_1", "doc1_2", etc.)
+           * If provided and document isn't split, original IDs are used as-is
+
+       :return: List of IDs for all added documents. When no custom IDs are provided:
+
+           * Without splitting: Vector store generates IDs automatically
+           * With splitting: Vector store generates separate IDs for each chunk

-       Returns:
-           list: List of IDs for all added documents. When no custom IDs are provided:
-               - Without splitting: Vector store generates IDs automatically
-               - With splitting: Vector store generates separate IDs for each chunk
            When custom IDs are provided:
-               - Without splitting: Uses provided IDs directly
-               - With splitting: Generates sequential IDs as "{original_id}_{i}" for each chunk
+
+           * Without splitting: Uses provided IDs directly
+           * With splitting: Generates sequential IDs as "{original_id}_{i}" for each chunk
+       :rtype: list
+
        """
        all_ids = []
        user_ids = kwargs.pop("ids", None)
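The documented ID scheme is easy to state in isolation: a custom ID is used as-is for an unsplit document, and becomes "{original_id}_{i}" (1-based) once a splitter produces multiple chunks. A standalone sketch of that rule (not the mlrun implementation):

    def chunk_ids(original_id: str, num_chunks: int) -> list[str]:
        # an unsplit document keeps the caller-provided ID
        if num_chunks <= 1:
            return [original_id]
        # a split document gets sequential suffixes starting at 1
        return [f"{original_id}_{i}" for i in range(1, num_chunks + 1)]

    assert chunk_ids("doc1", 1) == ["doc1"]
    assert chunk_ids("doc1", 3) == ["doc1_1", "doc1_2", "doc1_3"]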
mlrun/model_monitoring/api.py CHANGED
@@ -375,8 +375,10 @@ def _generate_model_endpoint(
          ),
          spec=mlrun.common.schemas.ModelEndpointSpec(
              function_name=function_name,
-             model_name=model_obj.metadata.key if model_path else None,
-             model_uid=model_obj.metadata.uid if model_path else None,
+             model_name=model_obj.metadata.key if model_obj else None,
+             model_uid=model_obj.metadata.uid if model_obj else None,
+             model_tag=model_obj.metadata.tag if model_obj else None,
+             model_db_key=model_obj.spec.db_key if model_obj else None,
              model_class="drift-analysis",
          ),
          status=mlrun.common.schemas.ModelEndpointStatus(
mlrun/serving/v2_serving.py CHANGED
@@ -573,11 +573,13 @@ def _init_endpoint_record(
          model.get_model()
          if model.model_spec:
              model_name = model.model_spec.metadata.key
+             model_db_key = model.model_spec.spec.db_key
              model_uid = model.model_spec.metadata.uid
              model_tag = model.model_spec.tag
              model_labels = model.model_spec.labels  # todo : check if we still need this
          else:
              model_name = None
+             model_db_key = None
              model_uid = None
              model_tag = None
              model_labels = {}
@@ -611,9 +613,10 @@
              function_tag=graph_server.function_tag or "latest",
              function_uid=function_uid,
              model_name=model_name,
+             model_tag=model_tag,
+             model_db_key=model_db_key,
              model_uid=model_uid,
              model_class=model.__class__.__name__,
-             model_tag=model_tag,
          )
          model_ep = mlrun.common.schemas.ModelEndpoint(
              metadata=mlrun.common.schemas.ModelEndpointMetadata(
@@ -627,8 +630,10 @@ def _init_endpoint_record(
              function_uid=function_uid,
              function_tag=graph_server.function_tag or "latest",
              model_name=model_name,
+             model_db_key=model_db_key,
              model_uid=model_uid,
              model_class=model.__class__.__name__,
+             model_tag=model_tag,
          ),
          status=mlrun.common.schemas.ModelEndpointStatus(
              monitoring_mode=mlrun.common.schemas.model_monitoring.ModelMonitoringMode.enabled
@@ -649,6 +654,8 @@ def _init_endpoint_record(
          attributes[ModelEndpointSchema.MODEL_UID] = model_uid
      if model_tag != model_ep.spec.model_tag:
          attributes[ModelEndpointSchema.MODEL_TAG] = model_tag
+     if model_db_key != model_ep.spec.model_db_key:
+         attributes[ModelEndpointSchema.MODEL_DB_KEY] = model_db_key
      if model_labels != model_ep.metadata.labels:
          attributes[ModelEndpointSchema.LABELS] = model_labels
      if model.__class__.__name__ != model_ep.spec.model_class:
mlrun/utils/version/version.json CHANGED
@@ -1,4 +1,4 @@
  {
-     "git_commit": "5be219a3a066786e96ee697c208cd0f59ef63362",
-     "version": "1.8.0-rc9"
+     "git_commit": "9e2d1e195daed90072d1cd33a5eee339577dc35a",
+     "version": "1.8.0-rc10"
  }
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: mlrun
- Version: 1.8.0rc9
+ Version: 1.8.0rc10
  Summary: Tracking and config of machine learning runs
  Home-page: https://github.com/mlrun/mlrun
  Author: Yaron Haviv
@@ -1,6 +1,6 @@
  mlrun/__init__.py,sha256=7vuMpUiigXXDrghLRq680LKWy1faC0kQyGCZb_7cwyE,7473
  mlrun/__main__.py,sha256=o65gXHhmFA9GV_n2mqmAO80nW3MAwo_s7j80IKgCzRE,45949
- mlrun/config.py,sha256=wTGv_ityRKNb5rqrbo2dqkqeYtd8h5i90m3W4AYmt8s,70941
+ mlrun/config.py,sha256=HFQv1Qx3swxEW-QDITkakMrx6IcEEwApmkd1o38PDAI,70992
  mlrun/errors.py,sha256=5raKb1PXQpTcIvWQ4sr1qn2IS7P_GT_FydBJ0dXkVuc,8097
  mlrun/execution.py,sha256=GDHVdltjcSOtqPlhoAuXQAhvYhc8PhEbsnCIxONSCjg,47435
  mlrun/features.py,sha256=ReBaNGsBYXqcbgI012n-SO_j6oHIbk_Vpv0CGPXbUmo,15842
@@ -13,10 +13,10 @@ mlrun/secrets.py,sha256=ibtCK79u7JVBZF6F0SP1-xXXF5MyrLEUs_TCWiJAnlc,7798
  mlrun/alerts/__init__.py,sha256=0gtG1BG0DXxFrXegIkjbM1XEN4sP9ODo0ucXrNld1hU,601
  mlrun/alerts/alert.py,sha256=mTROlDXzQw5gyWBFaUnykai3wpvjmgRmo28p0ytbzIU,15930
  mlrun/api/schemas/__init__.py,sha256=fEWH4I8hr5AdRJ7yoW44RlFB6NHkYDxyomP5J6ct1z4,14248
- mlrun/artifacts/__init__.py,sha256=5NRo_T5Hrltdssepx6Rs4-2vxuiYooF-OPFtjj0Jfow,1138
+ mlrun/artifacts/__init__.py,sha256=ofC2extBCOC1wg1YtdTzWzH3eeG_f-sFBUkHjYtZJpk,1175
  mlrun/artifacts/base.py,sha256=nz2ZqC74JGfWN0M6_hOXXQj3bXSTxNp4eUgvWHVcdvY,29979
  mlrun/artifacts/dataset.py,sha256=QTot5vCgLHatlIWwNnKbWdZ8HHTxaZ7wk4gWQDoqQ2k,16655
- mlrun/artifacts/document.py,sha256=qt4SP9apOuBQoMwcZbAG6-wLsyTwSGfsASEdrnx8-fc,12380
+ mlrun/artifacts/document.py,sha256=yfJB3GHx2bD9efmAYYI5e6xbHAEO8xdiP1Xm6W1NRi8,13988
  mlrun/artifacts/manager.py,sha256=bXb70mKF6wIGs7syCiFfGnjalqx4g9bO_J5DaVzUUKw,16163
  mlrun/artifacts/model.py,sha256=jeOjUq_iZSHoNqlPyGgOz6acwje1Yqpg1yZwF9GbyG8,21615
  mlrun/artifacts/plots.py,sha256=dS0mHGt1b20tN2JyEH9H5o5I0oMKZkzn3Uz_3Hf4WjU,4813
@@ -72,15 +72,15 @@ mlrun/common/schemas/secret.py,sha256=CCxFYiPwJtDxwg2VVJH9nUG9cAZ2a34IjeuaWv-BYl
  mlrun/common/schemas/tag.py,sha256=HRZi5QZ4vVGaCr2AMk9eJgcNiAIXmH4YDc8a4fvF770,893
  mlrun/common/schemas/workflow.py,sha256=rwYzDJYxpE9k4kC88j_eUCmqK4ZsWV_h-_nli7Fs7Ow,2078
  mlrun/common/schemas/model_monitoring/__init__.py,sha256=6gQcNTzCDEd-rDv6qDC5pE78INnV1AHKP9WbysUVNcw,1840
- mlrun/common/schemas/model_monitoring/constants.py,sha256=UblKVJpdBDic7PvFSHboRHeTJVP-6O-upPHZ8nM8ZKo,11265
+ mlrun/common/schemas/model_monitoring/constants.py,sha256=h6-_FOG3SeKPDx4lS-ckZkgA3WU_Dl4dtezfIMIi61k,11299
  mlrun/common/schemas/model_monitoring/grafana.py,sha256=Rq10KKOyyUYr7qOQFZfwGZtUim0LY9O0LQ5uc9jmIVQ,1562
- mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=Ibb9S3DYi9rgXR8xCYyRK9wrET0MgUheb9BLCfnImzw,10888
+ mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=5AZE2N4Ptfa5r_xOYMerGrwS_qEelMkKAk13KR1AgRE,10925
  mlrun/data_types/__init__.py,sha256=unRo9GGwCmj0hBKBRsXJ2P4BzpQaddlQTvIrVQaKluI,984
- mlrun/data_types/data_types.py,sha256=uB9qJusSvPRK2PTvrFBXrS5jcDXMuwqXokJGToDg4VA,4953
+ mlrun/data_types/data_types.py,sha256=0_oKLC6-sXL2_nnaDMP_HSXB3fD1nJAG4J2Jq6sGNNw,4998
  mlrun/data_types/infer.py,sha256=KdaRgWcqvLkuLjXrMuDr3ik6WY7JP5wJO0Yii_Vl5kw,6173
- mlrun/data_types/spark.py,sha256=ZyStZZvsPhShbojE-hSASVZW3z0Jc-doI7KrEgdELsM,9547
- mlrun/data_types/to_pandas.py,sha256=-ZbJBg00x4xxyqqqu3AVbEh-HaO2--DrChyPuedRhHA,11215
- mlrun/datastore/__init__.py,sha256=LW2GLnyeLCmVuIDgd4AJqwP8TXMdt9W5i8kwnfAjPkI,4078
+ mlrun/data_types/spark.py,sha256=4fPpqjFCYeFgK_yHhUNM4rT-1Gw9YiXazyjTK7TtbTI,9626
+ mlrun/data_types/to_pandas.py,sha256=KOy0FLXPJirsgH6szcC5BI6t70yVDCjuo6LmuYHNTuI,11429
+ mlrun/datastore/__init__.py,sha256=ca-9rcmxMimZKq3EHetpptZQ5udkf4O0sm37D7NWaXE,4128
  mlrun/datastore/alibaba_oss.py,sha256=k-OHVe08HjMewlkpsT657CbOiVFAfSq9_EqhCE-k86s,4940
  mlrun/datastore/azure_blob.py,sha256=SzAcHYSXkm8Zpopz2Ea-rWVClH0URocUazcNK04S9W0,12776
  mlrun/datastore/base.py,sha256=Dqg8PqX0TFKHZg27Dgguc3RnQ1GABZiLf87p5ErTqJs,26448
@@ -99,10 +99,10 @@ mlrun/datastore/spark_udf.py,sha256=NnnB3DZxZb-rqpRy7b-NC7QWXuuqFn3XkBDc86tU4mQ,
  mlrun/datastore/spark_utils.py,sha256=_AsVoU5Ix_-W7Gyq8io8V-2GTk0m8THJNDP3WGGaWJY,2865
  mlrun/datastore/store_resources.py,sha256=PFOMrZ6KH6hBOb0PiO-cHx_kv0UpHu5P2t8_mrR-lS4,6842
  mlrun/datastore/storeytargets.py,sha256=uNYG4nCBD3JIfa51CG4cDe9ryc9oIcqUdUXKvCPB6uE,5086
- mlrun/datastore/targets.py,sha256=0RuprYSc95Uy-P_fk30d1dMGHDpgx7DAeydZEA3uR_k,80660
+ mlrun/datastore/targets.py,sha256=QiEK-mHmUt2qnS2yaBSSKgk8CKqsGU-JoQ9kHoW1bvE,80759
  mlrun/datastore/utils.py,sha256=ZDAzz0W16_JcM6Q9h4RoMbdruM9eA6YGlA5dw8gW8Bw,7754
  mlrun/datastore/v3io.py,sha256=QSYBORRLcJTeM9mt0EaWzyLcdmzrPkqrF7k5uLTam5U,8209
- mlrun/datastore/vectorstore.py,sha256=Au2nckQV1qJ2ZT7oXGfZF7SsiZ0iwCkSGGMNYpvSLqE,9475
+ mlrun/datastore/vectorstore.py,sha256=GUX9G09uCHfERVYAyKc1UdlipadnXU8gEZ-VzF0F0A4,9559
  mlrun/datastore/wasbfs/__init__.py,sha256=s5Ul-0kAhYqFjKDR2X0O2vDGDbLQQduElb32Ev56Te4,1343
  mlrun/datastore/wasbfs/fs.py,sha256=ge8NK__5vTcFT-krI155_8RDUywQw4SIRX6BWATXy9Q,6299
  mlrun/db/__init__.py,sha256=WqJ4x8lqJ7ZoKbhEyFqkYADd9P6E3citckx9e9ZLcIU,1163
@@ -216,7 +216,7 @@ mlrun/launcher/factory.py,sha256=RW7mfzEFi8fR0M-4W1JQg1iq3_muUU6OTqT_3l4Ubrk,233
  mlrun/launcher/local.py,sha256=9zNiuswHnSINDj4yYP2Vd192b5d4FUtSA8O2ICKjsKo,11279
  mlrun/launcher/remote.py,sha256=rLJW4UAnUT5iUb4BsGBOAV3K4R29a0X4lFtRkVKlyYU,7709
  mlrun/model_monitoring/__init__.py,sha256=RgXjrQSN7gZwo-URei3FNo3fE9wWlo1-wNhSVtNXbPA,784
- mlrun/model_monitoring/api.py,sha256=Zknu0LVsyYRfga4phfuUizYvYFYHvjzM2hlHqD8cSLM,28154
+ mlrun/model_monitoring/api.py,sha256=8pXirt4w5RkDB2I5GpVaU3ZzClhEq3Wy4m1l20eJ7pw,28292
  mlrun/model_monitoring/controller.py,sha256=dBfZQswF67vqeUFnmgsm9jU_5sOs9dLwMPEiYHG-Kk8,19786
  mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
  mlrun/model_monitoring/helpers.py,sha256=cGK8ZQgzqW8-TAgpBF30HyHfDmeeSlogskHGxnE_Em8,16091
@@ -306,7 +306,7 @@ mlrun/serving/serving_wrapper.py,sha256=R670-S6PX_d5ER6jiHtRvacuPyFzQH0mEf2K0sBI
  mlrun/serving/states.py,sha256=DDNlyW0AQRgMfnXnYMBaXxF3gZQzxrAJipz_X6JMjjs,61604
  mlrun/serving/utils.py,sha256=k2EIYDWHUGkE-IBI6T0UNT32fw-KySsccIJM_LObI00,4171
  mlrun/serving/v1_serving.py,sha256=c6J_MtpE-Tqu00-6r4eJOCO6rUasHDal9W2eBIcrl50,11853
- mlrun/serving/v2_serving.py,sha256=3h3_BbwWcc7wvZuJcKg8gsGNRbsHZPzNiRoCz6_gRDs,27986
+ mlrun/serving/v2_serving.py,sha256=zfE96u997fhJ3Tk8ClD0dMF5jP0LVoeusfQTvw9tinI,28312
  mlrun/track/__init__.py,sha256=yVXbT52fXvGKRlc_ByHqIVt7-9L3DRE634RSeQwgXtU,665
  mlrun/track/tracker.py,sha256=CyTU6Qd3_5GGEJ_hpocOj71wvV65EuFYUjaYEUKAL6Q,3575
  mlrun/track/tracker_manager.py,sha256=IYBl99I62IC6VCCmG1yt6JoHNOQXa53C4DURJ2sWgio,5726
@@ -337,11 +337,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
  mlrun/utils/notifications/notification/slack.py,sha256=NKV4RFiY3gLsS8uPppgniPLyag8zJ9O1VhixoXkM7kw,7108
  mlrun/utils/notifications/notification/webhook.py,sha256=lSGKCQMa-TstKbMpZnU5uQkW14tzIaqjBHDXUNh9dlU,4848
  mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
- mlrun/utils/version/version.json,sha256=o7a7tP023Va9hL6r0fRpV0ka-Do02LiXBAmy4ECxg4o,88
+ mlrun/utils/version/version.json,sha256=GdmwGGicuscxhbk6g0vtigM9Ewa5pJg2kbAsIkGueY0,89
  mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
- mlrun-1.8.0rc9.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- mlrun-1.8.0rc9.dist-info/METADATA,sha256=tcz4gulsO8-BR9Adykf7AmvYN-b5SfQpbD4rQNWHEHA,24456
- mlrun-1.8.0rc9.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- mlrun-1.8.0rc9.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
- mlrun-1.8.0rc9.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
- mlrun-1.8.0rc9.dist-info/RECORD,,
+ mlrun-1.8.0rc10.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ mlrun-1.8.0rc10.dist-info/METADATA,sha256=RlIXSYk-UKe61vYPSgtoebkZzLBLPdSyDy18RYbdds4,24457
+ mlrun-1.8.0rc10.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
+ mlrun-1.8.0rc10.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+ mlrun-1.8.0rc10.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+ mlrun-1.8.0rc10.dist-info/RECORD,,