mlrun 1.8.0rc55__py3-none-any.whl → 1.8.0rc57__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mlrun might be problematic.

mlrun/config.py CHANGED
@@ -634,12 +634,6 @@ default_config = {
     "offline_storage_path": "model-endpoints/{kind}",
     "parquet_batching_max_events": 10_000,
     "parquet_batching_timeout_secs": timedelta(minutes=1).total_seconds(),
-    "tdengine": {
-        "run_directly": True,
-        # timeout and retry are ignored when run_directly is set to True
-        "timeout": 10,
-        "retries": 1,
-    },
 },
 "secret_stores": {
     # Use only in testing scenarios (such as integration tests) to avoid using k8s for secrets (will use in-memory
mlrun/datastore/datastore.py CHANGED
@@ -111,7 +111,7 @@ def schema_to_store(schema):

 def uri_to_ipython(link):
     schema, endpoint, parsed_url = parse_url(link)
-    if schema in [DB_SCHEMA, "memory"]:
+    if schema in [DB_SCHEMA, "memory", "ds"]:
         return ""
     return schema_to_store(schema).uri_to_ipython(endpoint, parsed_url.path)

mlrun/db/httpdb.py CHANGED
@@ -937,7 +937,7 @@ class HTTPRunDB(RunDBInterface):

         :param name: Name of the run to retrieve.
         :param uid: Unique ID of the run, or a list of run UIDs.
-        :param project: Project that the runs belongs to.
+        :param project: Project that the runs belongs to. If not specified, the default project will be used.
         :param labels: Filter runs by label key-value pairs or key existence. This can be provided as:
             - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
               or `{"label": None}` to check for key existence.
@@ -1276,8 +1276,8 @@ class HTTPRunDB(RunDBInterface):
         :param producer_uri: Return artifacts produced by the requested producer URI. Producer URI usually
             points to a run and is used to filter artifacts by the run that produced them when the artifact producer id
             is a workflow id (artifact was created as part of a workflow).
-        :param format_: The format in which to return the artifacts. Default is 'full'.
-        :param limit: Maximum number of artifacts to return.
+        :param format_: The format in which to return the artifacts. Default is 'full'.
+        :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.10.0).
         :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
             parameter must be provided as well.
         :param rows_per_partition: How many top rows (per sorting defined by `partition_sort_by` and `partition_order`)
@@ -5099,6 +5099,13 @@ class HTTPRunDB(RunDBInterface):
         project = project or config.default_project
         labels = self._parse_labels(labels)

+        if limit:
+            # TODO: Remove this in 1.10.0
+            warnings.warn(
+                "'limit' is deprecated and will be removed in 1.10.0. Use 'page' and 'page_size' instead.",
+                FutureWarning,
+            )
+
         params = {
             "name": name,
             "tag": tag,
mlrun/model_monitoring/db/tsdb/base.py CHANGED
@@ -82,7 +82,8 @@ class TSDBConnector(ABC):

     @abstractmethod
     def delete_tsdb_records(
-        self, endpoint_ids: list[str], delete_timeout: Optional[int] = None
+        self,
+        endpoint_ids: list[str],
     ) -> None:
         """
         Delete model endpoint records from the TSDB connector.
mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py ADDED
@@ -0,0 +1,213 @@
+# Copyright 2025 Iguazio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import traceback
+from collections.abc import Callable
+from enum import Enum
+from typing import Any, Final, Optional, Union
+
+import taosws
+from taosws import TaosStmt
+
+
+class _StrEnum(str, Enum):
+    pass
+
+
+class TimestampPrecision(_StrEnum):
+    ms = "ms"  # milliseconds
+    us = "us"  # microseconds
+    ns = "ns"  # nanoseconds
+
+
+_TS_PRECISION_TO_FACTOR_AND_FUNC: Final[
+    dict[TimestampPrecision, tuple[int, Callable[[list[int]], taosws.PyColumnView]]]
+] = {
+    TimestampPrecision.ms: (10**3, taosws.millis_timestamps_to_column),
+    TimestampPrecision.us: (10**6, taosws.micros_timestamps_to_column),
+    TimestampPrecision.ns: (10**9, taosws.nanos_timestamps_to_column),
+}
+
+
+class QueryResult:
+    def __init__(self, data, fields):
+        self.data = data
+        self.fields = fields
+
+    def __eq__(self, other):
+        return self.data == other.data and self.fields == other.fields
+
+    def __repr__(self):
+        return f"QueryResult({self.data}, {self.fields})"
+
+
+class Field:
+    def __init__(self, name, type, bytes):
+        self.name = name
+        self.type = type
+        self.bytes = bytes
+
+    def __eq__(self, other):
+        return (
+            self.name == other.name
+            and self.type == other.type
+            and self.bytes == other.bytes
+        )
+
+    def __repr__(self):
+        return f"Field({self.name}, {self.type}, {self.bytes})"
+
+
+class TDEngineError(Exception):
+    pass
+
+
+class ErrorResult:
+    def __init__(self, tb, err):
+        self.tb = tb
+        self.err = err
+
+
+def _get_timestamp_column(
+    values: list, timestamp_precision: TimestampPrecision
+) -> taosws.PyColumnView:
+    factor, to_col_func = _TS_PRECISION_TO_FACTOR_AND_FUNC[timestamp_precision]
+    timestamps = [round(timestamp.timestamp() * factor) for timestamp in values]
+    return to_col_func(timestamps)
+
+
+def values_to_column(
+    values: list,
+    column_type: str,
+    timestamp_precision: TimestampPrecision = TimestampPrecision.ms,
+) -> taosws.PyColumnView:
+    if column_type == "TIMESTAMP":
+        return _get_timestamp_column(values, timestamp_precision)
+    if column_type == "FLOAT":
+        return taosws.floats_to_column(values)
+    if column_type == "INT":
+        return taosws.ints_to_column(values)
+    if column_type.startswith("BINARY"):
+        return taosws.binary_to_column(values)
+
+    raise NotImplementedError(f"Unsupported column type '{column_type}'")
+
+
+class Statement:
+    def __init__(
+        self,
+        columns: dict[str, str],
+        subtable: str,
+        values: dict[str, Any],
+        timestamp_precision: str = TimestampPrecision.ms,
+    ) -> None:
+        self.columns = columns
+        self.subtable = subtable
+        self.values = values
+        self.timestamp_precision = TimestampPrecision[timestamp_precision]
+
+    def prepare(self, statement: TaosStmt) -> TaosStmt:
+        question_marks = ", ".join("?" * len(self.columns))
+        statement.prepare(f"INSERT INTO ? VALUES ({question_marks});")
+        statement.set_tbname(self.subtable)
+
+        bind_params = []
+
+        for col_name, col_type in self.columns.items():
+            val = self.values[col_name]
+            bind_params.append(
+                values_to_column(
+                    [val], col_type, timestamp_precision=self.timestamp_precision
+                )
+            )
+
+        statement.bind_param(bind_params)
+        statement.add_batch()
+        return statement
+
+
+def _run(connection_string, prefix_statements, q, statements, query):
+    try:
+        conn = taosws.connect(connection_string)
+
+        for statement in prefix_statements + statements:
+            if isinstance(statement, Statement):
+                prepared_statement = statement.prepare(conn.statement())
+                prepared_statement.execute()
+            else:
+                conn.execute(statement)
+
+        if not query:
+            q.put(None)
+            return
+
+        res = conn.query(query)
+
+        # taosws.TaosField is not serializable
+        fields = [
+            Field(field.name(), field.type(), field.bytes()) for field in res.fields
+        ]
+
+        q.put(QueryResult(list(res), fields))
+    except Exception as e:
+        tb = traceback.format_exc()
+        q.put(ErrorResult(tb, e))
+
+
+class TDEngineConnection:
+    def __init__(self, connection_string):
+        self._connection_string = connection_string
+        self.prefix_statements = []
+
+        self._conn = taosws.connect(self._connection_string)
+
+    def run(
+        self,
+        statements: Optional[Union[str, Statement, list[Union[str, Statement]]]] = None,
+        query: Optional[str] = None,
+    ) -> Optional[QueryResult]:
+        statements = statements or []
+        if not isinstance(statements, list):
+            statements = [statements]
+
+        for statement in self.prefix_statements + statements:
+            if isinstance(statement, Statement):
+                try:
+                    prepared_statement = statement.prepare(self._conn.statement())
+                    prepared_statement.execute()
+                except taosws.Error as e:
+                    raise TDEngineError(
+                        f"Failed to run prepared statement `{self._conn.statement()}`: {e}"
+                    ) from e
+            else:
+                try:
+                    self._conn.execute(statement)
+                except taosws.Error as e:
+                    raise TDEngineError(
+                        f"Failed to run statement `{statement}`: {e}"
+                    ) from e
+
+        if not query:
+            return None
+
+        try:
+            res = self._conn.query(query)
+        except taosws.Error as e:
+            raise TDEngineError(f"Failed to run query `{query}`: {e}") from e
+
+        fields = [
+            Field(field.name(), field.type(), field.bytes()) for field in res.fields
+        ]
+
+        return QueryResult(list(res), fields)
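This new module replaces the external taoswswrap dependency and runs statements in-process, which is why the connector hunks below drop the run_directly, timeout, and retries options. A minimal usage sketch of the wrapper above (the DSN, database, subtable, and column layout are illustrative assumptions, not values from the package):

    from datetime import datetime, timezone

    from mlrun.model_monitoring.db.tsdb.tdengine.tdengine_connection import (
        Statement,
        TDEngineConnection,
    )

    # Hypothetical DSN and database name, for illustration only.
    conn = TDEngineConnection("taosws://root:taosdata@localhost:6041")
    conn.prefix_statements = ["USE model_monitoring"]

    # A prepared Statement binds one row of typed column values to a subtable.
    insert = Statement(
        columns={"ts": "TIMESTAMP", "value": "FLOAT"},
        subtable="endpoint_0_metrics",
        values={"ts": datetime.now(tz=timezone.utc), "value": 0.42},
    )
    conn.run(statements=insert)

    # Queries return a picklable QueryResult with .data (rows) and .fields (column metadata).
    result = conn.run(query="SELECT ts, value FROM endpoint_0_metrics LIMIT 10")
    print(result.fields, result.data)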
mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py CHANGED
@@ -18,10 +18,6 @@ from typing import Callable, Final, Literal, Optional, Union

 import pandas as pd
 import taosws
-from taoswswrap.tdengine_connection import (
-    Statement,
-    TDEngineConnection,
-)

 import mlrun.common.schemas.model_monitoring as mm_schemas
 import mlrun.common.types
@@ -29,6 +25,10 @@ import mlrun.model_monitoring.db.tsdb.tdengine.schemas as tdengine_schemas
 import mlrun.model_monitoring.db.tsdb.tdengine.stream_graph_steps
 from mlrun.datastore.datastore_profile import DatastoreProfile
 from mlrun.model_monitoring.db import TSDBConnector
+from mlrun.model_monitoring.db.tsdb.tdengine.tdengine_connection import (
+    Statement,
+    TDEngineConnection,
+)
 from mlrun.model_monitoring.helpers import get_invocations_fqn
 from mlrun.utils import logger

@@ -74,12 +74,6 @@ class TDEngineConnector(TSDBConnector):

         self._init_super_tables()

-        self._run_directly = (
-            mlrun.mlconf.model_endpoint_monitoring.tdengine.run_directly
-        )
-        self._timeout = mlrun.mlconf.model_endpoint_monitoring.tdengine.timeout
-        self._retries = mlrun.mlconf.model_endpoint_monitoring.tdengine.retries
-
     @property
     def connection(self) -> TDEngineConnection:
         global _connection
@@ -97,7 +91,7 @@
         """Establish a connection to the TSDB server."""
         logger.debug("Creating a new connection to TDEngine", project=self.project)
         conn = TDEngineConnection(
-            self._tdengine_connection_profile.dsn(), run_directly=self._run_directly
+            self._tdengine_connection_profile.dsn(),
         )
         conn.prefix_statements = [f"USE {self.database}"]

@@ -125,8 +119,6 @@
         self.connection.prefix_statements = []
         self.connection.run(
             statements=f"CREATE DATABASE IF NOT EXISTS {self.database} PRECISION '{self._timestamp_precision}'",
-            timeout=self._timeout,
-            retries=self._retries,
         )
         self.connection.prefix_statements = [f"USE {self.database}"]
         logger.debug(
@@ -146,8 +138,6 @@
         conn = self.connection
         conn.run(
             statements=create_table_query,
-            timeout=self._timeout,
-            retries=self._retries,
         )

     def write_application_event(
@@ -207,8 +197,6 @@
                 create_table_sql,
                 insert_statement,
             ],
-            timeout=self._timeout,
-            retries=self._retries,
         )

     @staticmethod
@@ -312,7 +300,8 @@
         )

     def delete_tsdb_records(
-        self, endpoint_ids: list[str], delete_timeout: Optional[int] = None
+        self,
+        endpoint_ids: list[str],
     ):
         """
         To delete subtables within TDEngine, we first query the subtables names with the provided endpoint_ids.
@@ -333,8 +322,6 @@
             )
             subtables_result = self.connection.run(
                 query=get_subtable_query,
-                timeout=self._timeout,
-                retries=self._retries,
             )
             subtables.extend([subtable[0] for subtable in subtables_result.data])
         except Exception as e:
@@ -355,8 +342,6 @@
         try:
             self.connection.run(
                 statements=drop_statements,
-                timeout=delete_timeout or self._timeout,
-                retries=self._retries,
             )
         except Exception as e:
             logger.warning(
@@ -387,8 +372,6 @@
         try:
             self.connection.run(
                 statements=drop_statements,
-                timeout=self._timeout,
-                retries=self._retries,
             )
         except Exception as e:
             logger.warning(
@@ -412,8 +395,6 @@
         try:
             table_name = self.connection.run(
                 query=query_random_table_name,
-                timeout=self._timeout,
-                retries=self._retries,
             )
             if len(table_name.data) == 0:
                 # no tables were found under the database
@@ -436,8 +417,6 @@
         try:
             self.connection.run(
                 statements=drop_database_query,
-                timeout=self._timeout,
-                retries=self._retries,
             )
             logger.debug(
                 "The TDEngine database has been successfully dropped",
@@ -530,7 +509,7 @@
         logger.debug("Querying TDEngine", query=full_query)
         try:
             query_result = self.connection.run(
-                query=full_query, timeout=self._timeout, retries=self._retries
+                query=full_query,
             )
         except taosws.QueryError as e:
             raise mlrun.errors.MLRunInvalidArgumentError(
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py CHANGED
@@ -455,12 +455,20 @@ class V3IOTSDBConnector(TSDBConnector):
         # Delete all tables
         tables = mm_schemas.V3IOTSDBTables.list()
         for table_to_delete in tables:
-            try:
-                self.frames_client.delete(backend=_TSDB_BE, table=table_to_delete)
-            except v3io_frames.DeleteError as e:
+            if table_to_delete in self.tables:
+                try:
+                    self.frames_client.delete(
+                        backend=_TSDB_BE, table=self.tables[table_to_delete]
+                    )
+                except v3io_frames.DeleteError as e:
+                    logger.warning(
+                        f"Failed to delete TSDB table '{table_to_delete}'",
+                        err=mlrun.errors.err_to_str(e),
+                    )
+            else:
                 logger.warning(
-                    f"Failed to delete TSDB table '{table}'",
-                    err=mlrun.errors.err_to_str(e),
+                    f"Skipping deletion: table '{table_to_delete}' is not among the initialized tables.",
+                    initialized_tables=list(self.tables.keys()),
                 )

         # Final cleanup of tsdb path
@@ -470,7 +478,8 @@
             store.rm(tsdb_path, recursive=True)

     def delete_tsdb_records(
-        self, endpoint_ids: list[str], delete_timeout: Optional[int] = None
+        self,
+        endpoint_ids: list[str],
     ):
         logger.debug(
             "Deleting model endpoints resources using the V3IO TSDB connector",
mlrun/projects/project.py CHANGED
@@ -470,7 +470,8 @@ def get_or_create_project(
     parameters: Optional[dict] = None,
     allow_cross_project: Optional[bool] = None,
 ) -> "MlrunProject":
-    """Load a project from MLRun DB, or create/import if it does not exist
+    """Load a project from MLRun DB, or create/import if it does not exist.
+    The project will become the default project for the current session.

     MLRun looks for a project.yaml file with project definition and objects in the project root path
     and use it to initialize the project, in addition it runs the project_setup.py file (if it exists)
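A minimal usage sketch reflecting the clarified docstring above (the project name and context path are illustrative):

    import mlrun

    # Loads "my-project" from the MLRun DB, or creates/imports it if missing;
    # it then becomes the default project for the current session.
    project = mlrun.get_or_create_project("my-project", context="./")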
@@ -4322,7 +4323,7 @@ class MlrunProject(ModelObj):
         :param kind: Return artifacts of the requested kind.
         :param category: Return artifacts of the requested category.
         :param tree: Return artifacts of the requested tree.
-        :param limit: Maximum number of artifacts to return.
+        :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.10.0).
         :param format_: The format in which to return the artifacts. Default is 'full'.
         :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
             parameter must be provided as well.
@@ -4333,6 +4334,14 @@
         :param partition_order: Order of sorting within partitions - `asc` or `desc`. Default is `desc`.
         """
         db = mlrun.db.get_run_db(secrets=self._secrets)
+
+        if limit:
+            # TODO: Remove this in 1.10.0
+            warnings.warn(
+                "'limit' is deprecated and will be removed in 1.10.0. Use 'page' and 'page_size' instead.",
+                FutureWarning,
+            )
+
         return db.list_artifacts(
             name,
             self.metadata.name,
mlrun/run.py CHANGED
@@ -637,7 +637,7 @@ def code_to_function(
        - databricks: run code on Databricks cluster (python scripts, Spark etc.)
        - application: run a long living application (e.g. a web server, UI, etc.)

-    Learn more about [Kinds of function (runtimes)](../concepts/functions-overview.html).
+    Learn more about :doc:`../../concepts/functions-overview`

    :param name: function name, typically best to use hyphen-case
    :param project: project used to namespace the function, defaults to 'default'
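For context, a minimal sketch of a typical code_to_function call (the script name, handler, and image are illustrative):

    import mlrun

    # Build a "job" runtime from a local script; kind may be any of the runtimes
    # listed in the docstring (job, serving, dask, databricks, application, ...).
    fn = mlrun.code_to_function(
        name="trainer",
        project="my-project",
        filename="train.py",
        kind="job",
        image="mlrun/mlrun",
        handler="train",
    )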
mlrun/runtimes/mounts.py CHANGED
@@ -352,10 +352,12 @@ def auto_mount(
    """Choose the mount based on env variables and params

    Volume will be selected by the following order:
+
    - k8s PVC volume when both pvc_name and volume_mount_path are set
    - k8s PVC volume when env var is set: MLRUN_PVC_MOUNT=<pvc-name>:<mount-path>
    - k8s PVC volume if it's configured as the auto mount type
    - iguazio v3io volume when V3IO_ACCESS_KEY and V3IO_USERNAME env vars are set
+
    """
    if pvc_name and volume_mount_path:
        return mount_pvc(
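A minimal usage sketch of auto_mount with an explicit PVC, which takes precedence per the order listed above (function, PVC, and path names are illustrative):

    import mlrun
    from mlrun.runtimes.mounts import auto_mount

    fn = mlrun.code_to_function(name="trainer", filename="train.py", kind="job")

    # Explicit pvc_name/volume_mount_path win over MLRUN_PVC_MOUNT, the configured
    # auto-mount type, and V3IO credentials.
    fn.apply(auto_mount(pvc_name="shared-pvc", volume_mount_path="/mnt/data"))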
mlrun/utils/version/version.json CHANGED
@@ -1,4 +1,4 @@
 {
-    "git_commit": "3b941a1d577b40febe419e448b3b4da3cc85d127",
-    "version": "1.8.0-rc55"
+    "git_commit": "f2265655b8f005d0446f9f5c81b60dd359e36daa",
+    "version": "1.8.0-rc57"
 }
mlrun-1.8.0rc55.dist-info/METADATA → mlrun-1.8.0rc57.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mlrun
-Version: 1.8.0rc55
+Version: 1.8.0rc57
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -52,7 +52,7 @@ Requires-Dist: deprecated~=1.2
 Requires-Dist: jinja2>=3.1.3,~=3.1
 Requires-Dist: orjson<4,>=3.9.15
 Requires-Dist: mlrun-pipelines-kfp-common~=0.3.12
-Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.3.9; python_version < "3.11"
+Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.3.10; python_version < "3.11"
 Requires-Dist: docstring_parser~=0.16
 Requires-Dist: aiosmtplib~=3.0
 Provides-Extra: s3
@@ -99,7 +99,6 @@ Requires-Dist: ossfs==2023.12.0; extra == "alibaba-oss"
 Requires-Dist: oss2==2.18.1; extra == "alibaba-oss"
 Provides-Extra: tdengine
 Requires-Dist: taos-ws-py==0.3.2; extra == "tdengine"
-Requires-Dist: taoswswrap~=0.3.5; extra == "tdengine"
 Provides-Extra: snowflake
 Requires-Dist: snowflake-connector-python~=3.7; extra == "snowflake"
 Provides-Extra: kfp18
@@ -119,7 +118,7 @@ Requires-Dist: timelength~=1.1; extra == "api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
 Requires-Dist: aiosmtplib~=3.0; extra == "api"
 Requires-Dist: pydantic<2,>=1; extra == "api"
-Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.9; python_version < "3.11" and extra == "api"
+Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.10; python_version < "3.11" and extra == "api"
 Requires-Dist: grpcio~=1.70.0; extra == "api"
 Provides-Extra: all
 Requires-Dist: adlfs==2023.9.0; extra == "all"
@@ -152,7 +151,6 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "all"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "all"
 Requires-Dist: sqlalchemy~=1.4; extra == "all"
 Requires-Dist: taos-ws-py==0.3.2; extra == "all"
-Requires-Dist: taoswswrap~=0.3.5; extra == "all"
 Provides-Extra: complete
 Requires-Dist: adlfs==2023.9.0; extra == "complete"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete"
@@ -184,7 +182,6 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "complete"
 Requires-Dist: sqlalchemy~=1.4; extra == "complete"
 Requires-Dist: taos-ws-py==0.3.2; extra == "complete"
-Requires-Dist: taoswswrap~=0.3.5; extra == "complete"
 Provides-Extra: complete-api
 Requires-Dist: adlfs==2023.9.0; extra == "complete-api"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete-api"
@@ -215,7 +212,7 @@ Requires-Dist: igz-mgmt~=0.4.1; extra == "complete-api"
 Requires-Dist: kafka-python~=2.0; extra == "complete-api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "complete-api"
 Requires-Dist: mlflow~=2.16; extra == "complete-api"
-Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.9; python_version < "3.11" and extra == "complete-api"
+Requires-Dist: mlrun-pipelines-kfp-v1-8[kfp]~=0.3.10; python_version < "3.11" and extra == "complete-api"
 Requires-Dist: msrest~=0.6.21; extra == "complete-api"
 Requires-Dist: objgraph~=3.6; extra == "complete-api"
 Requires-Dist: oss2==2.18.1; extra == "complete-api"
@@ -229,7 +226,6 @@ Requires-Dist: s3fs<2024.7,>=2023.9.2; extra == "complete-api"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "complete-api"
 Requires-Dist: sqlalchemy~=1.4; extra == "complete-api"
 Requires-Dist: taos-ws-py==0.3.2; extra == "complete-api"
-Requires-Dist: taoswswrap~=0.3.5; extra == "complete-api"
 Requires-Dist: timelength~=1.1; extra == "complete-api"
 Requires-Dist: uvicorn~=0.32.1; extra == "complete-api"
 Dynamic: author
mlrun-1.8.0rc55.dist-info/RECORD → mlrun-1.8.0rc57.dist-info/RECORD CHANGED
@@ -1,6 +1,6 @@
 mlrun/__init__.py,sha256=Cqm9U9eCEdLpMejhU2BEhubu0mHL71igJJIwYa738EA,7450
 mlrun/__main__.py,sha256=0NDzPf9VFRO8KFfGgb8mkGUPIDS285aASV8Hbxs-ND0,45920
-mlrun/config.py,sha256=sYNeLy-LztiWnmfGmM-LMcGEDv6PM_8b7e_YjIcoDzs,72126
+mlrun/config.py,sha256=xxmIe0g1YP2Y5_R_uQjP3w6v_4lA7iTjXhAyojdY08I,71929
 mlrun/errors.py,sha256=LkcbXTLANGdsgo2CRX2pdbyNmt--lMsjGv0XZMgP-Nc,8222
 mlrun/execution.py,sha256=FUktsD3puSFjc3LZJU35b-OmFBrBPBNntViCLQVuwnk,50008
 mlrun/features.py,sha256=ReBaNGsBYXqcbgI012n-SO_j6oHIbk_Vpv0CGPXbUmo,15842
@@ -8,7 +8,7 @@ mlrun/k8s_utils.py,sha256=-RmKAlSBo_qVeJa1bIiwi6TUyuEpb4AhF7wIQ_H5ZJ0,8909
 mlrun/lists.py,sha256=-nbmqScRia0v2IdSHt6Pd0fLRLSEtdB9bSxyD92BWvs,8562
 mlrun/model.py,sha256=5YedJfY9La867fhW8sZJdWb4FwyXPR1r1C5SqYyB4_w,85864
 mlrun/render.py,sha256=940H9fBBFeghH4dlifbURvtjlvw4GlWdAXezN6ky4rI,13275
-mlrun/run.py,sha256=n9n5IWBEaOrMIeSakp01DyL09_6FvLy3LCqWpBtvc08,45140
+mlrun/run.py,sha256=0ORoMtEq6-D1pZLHcYMb2szCFXS3P6N8XhAzu6Ud1NA,45112
 mlrun/secrets.py,sha256=dZPdkc_zzfscVQepOHUwmzFqnBavDCBXV9DQoH_eIYM,7800
 mlrun/alerts/__init__.py,sha256=0gtG1BG0DXxFrXegIkjbM1XEN4sP9ODo0ucXrNld1hU,601
 mlrun/alerts/alert.py,sha256=QQFZGydQbx9RvAaSiaH-ALQZVcDKQX5lgizqj_rXW2k,15948
@@ -85,7 +85,7 @@ mlrun/datastore/__init__.py,sha256=81ulmQnRk1ENvwYOdetxqsLnr2gYVtW-KsvF-tY1Jxk,5
 mlrun/datastore/alibaba_oss.py,sha256=k-OHVe08HjMewlkpsT657CbOiVFAfSq9_EqhCE-k86s,4940
 mlrun/datastore/azure_blob.py,sha256=SzAcHYSXkm8Zpopz2Ea-rWVClH0URocUazcNK04S9W0,12776
 mlrun/datastore/base.py,sha256=9R3lwB_L4hv5WW2q24WS62_KTh-wO4UG6pwzISZU6bM,26231
-mlrun/datastore/datastore.py,sha256=frUYYP4i8ZmnY8GNXSgN_3x_exRgRPfxrCtAGEUifEU,9478
+mlrun/datastore/datastore.py,sha256=AXXPgHpSG8Ig1RtTDGfdCJu4UT-AQPC43FGBOptIVOg,9484
 mlrun/datastore/datastore_profile.py,sha256=RRpb5TfTDBOnZQGSr6Zlmi1QSPHRDssBlWGLIpNBHM0,23860
 mlrun/datastore/dbfs_store.py,sha256=QkDRzwFnvm7CgEg4NuGxes6tBgKDyhX0CiBUvK8c9pk,6568
 mlrun/datastore/filestore.py,sha256=OcykjzhbUAZ6_Cb9bGAXRL2ngsOpxXSb4rR0lyogZtM,3773
@@ -110,7 +110,7 @@ mlrun/db/__init__.py,sha256=WqJ4x8lqJ7ZoKbhEyFqkYADd9P6E3citckx9e9ZLcIU,1163
 mlrun/db/auth_utils.py,sha256=hpg8D2r82oN0BWabuWN04BTNZ7jYMAF242YSUpK7LFM,5211
 mlrun/db/base.py,sha256=4ILHlN9vMw3n78qiiTJ1997ykgDKKqxDkLl7lHVVKJg,30814
 mlrun/db/factory.py,sha256=yP2vVmveUE7LYTCHbS6lQIxP9rW--zdISWuPd_I3d_4,2111
-mlrun/db/httpdb.py,sha256=if4W3yiBq3NMjzQU2GdS1ToBnKGxj601T-sIS2zx1og,232337
+mlrun/db/httpdb.py,sha256=y_6uf3hTwZky1KOgANS4bbx_WxYMQNDrdMkF130rdVQ,232653
 mlrun/db/nopdb.py,sha256=4TujePdRef5WpZY-TiGL9BmXphilNAypKREiGnqnKtg,27196
 mlrun/feature_store/__init__.py,sha256=SlI845bWt6xX34SXunHHqhmFAR9-5v2ak8N-qpcAPGo,1328
 mlrun/feature_store/api.py,sha256=qKj5Tk6prTab6XWatWhBuPRVp0eJEctoxRMN2wz48vA,32168
@@ -237,15 +237,16 @@ mlrun/model_monitoring/db/__init__.py,sha256=r47xPGZpIfMuv8J3PQCZTSqVPMhUta4sSJC
 mlrun/model_monitoring/db/_schedules.py,sha256=RWn4wtKsIXg668gMLpxO9I8GlkxvPSaA5y7w-wFDcgE,9048
 mlrun/model_monitoring/db/_stats.py,sha256=VVMWLMqG3Us3ozBkLaokJF22Ewv8WKmVE1-OvS_g9vA,6943
 mlrun/model_monitoring/db/tsdb/__init__.py,sha256=4S86V_Ot_skE16SLkw0WwsaAUB0ECH6SoJdp-TIu6s8,4645
-mlrun/model_monitoring/db/tsdb/base.py,sha256=g3IYIu45F296JDLaAedZw2h-vvsQPsEsYzALfT7_d60,26943
+mlrun/model_monitoring/db/tsdb/base.py,sha256=mvV9S_adfKaAObzT2w6m4ko30UnRxPrh30eL0dshVyA,26914
 mlrun/model_monitoring/db/tsdb/helpers.py,sha256=0oUXc4aUkYtP2SGP6jTb3uPPKImIUsVsrb9otX9a7O4,1189
 mlrun/model_monitoring/db/tsdb/tdengine/__init__.py,sha256=vgBdsKaXUURKqIf3M0y4sRatmSVA4CQiJs7J5dcVBkQ,620
 mlrun/model_monitoring/db/tsdb/tdengine/schemas.py,sha256=EslhaR65jfeNdD5Ibk-3Hb4e5r5qYPfHb9rTChX3sG0,12689
 mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py,sha256=Uadj0UvAmln2MxDWod-kAzau1uNlqZh981rPhbUH_5M,2857
-mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=utQsSsTWKLku30sXqq-ZbSP5yP2dghAiOkbVufK3aoQ,38797
+mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py,sha256=8xo2O_yQrJGNDoYYB3Bwtdwwvzs3U9dT3BtPot0zENQ,6449
+mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=h0ZrNgOwTlBRd_DaYDc6eeVM9f_8CLJMUPEAIrZpbyU,37803
 mlrun/model_monitoring/db/tsdb/v3io/__init__.py,sha256=aL3bfmQsUQ-sbvKGdNihFj8gLCK3mSys0qDcXtYOwgc,616
 mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py,sha256=_-zo9relCDtjGgievxAcAP9gVN9nDWs8BzGtFwTjb9M,6284
-mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=YltUZBPNJbaqOQljiOcVBZBjSYpGxIPcnA7tUiLqvr8,46806
+mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=aDdyHWJLOG-3EgPTJgbEzOOQf5NetpFQk_HdQbs5_zI,47160
 mlrun/model_monitoring/metrics/__init__.py,sha256=6CsTXAxeLbbf8yfCADTaxmiavqwrLEdYFJ-qc5kgDAY,569
 mlrun/model_monitoring/metrics/histogram_distance.py,sha256=E9_WIl2vd6qNvoHVHoFcnuQk3ekbFWOdi8aU7sHrfk4,4724
 mlrun/package/__init__.py,sha256=v7VDyK9kDOOuDvFo4oiGV2fx-vM1KL7fdN9pGLakhUQ,7008
@@ -270,7 +271,7 @@ mlrun/platforms/iguazio.py,sha256=6VBTq8eQ3mzT96tzjYhAtcMQ2VjF4x8LpIPW5DAcX2Q,13
 mlrun/projects/__init__.py,sha256=0Krf0WIKfnZa71WthYOg0SoaTodGg3sV_hK3f_OlTPI,1220
 mlrun/projects/operations.py,sha256=TzPbTYBgmYrjxTKP_wOtBJYFFFwDCQtaVvF1Snr0TfM,20029
 mlrun/projects/pipelines.py,sha256=wud7ezeEmhIJvfYE_wzQbA4ygEfGXHtbOtoOpan6poY,48556
-mlrun/projects/project.py,sha256=COaV9EQTmoE6_gatZFqaxaYTqdU-CWDaED7i-BATU7c,236260
+mlrun/projects/project.py,sha256=QdxXu66Oo-NIrBmzccVdolI-38P-i3YOOwJ2dozbaYA,236617
 mlrun/runtimes/__init__.py,sha256=J9Sy2HiyMlztNv6VUurMzF5H2XzttNil8nRsWDsqLyg,8923
 mlrun/runtimes/base.py,sha256=EL14Kmc1vWEjnBPJwLj5hHC6CtRAQHJLmohCD3sFEHo,37855
 mlrun/runtimes/daskjob.py,sha256=JwuGvOiPsxEDHHMMUS4Oie4hLlYYIZwihAl6DjroTY0,19521
@@ -279,7 +280,7 @@ mlrun/runtimes/function_reference.py,sha256=fnMKUEieKgy4JyVLhFpDtr6JvKgOaQP8F_K2
 mlrun/runtimes/generators.py,sha256=X8NDlCEPveDDPOHtOGcSpbl3pAVM3DP7fuPj5xVhxEY,7290
 mlrun/runtimes/kubejob.py,sha256=K-nR3J0-S3Em6Ez-JD0BxHczobQhC4m0829HLdSwX8g,8797
 mlrun/runtimes/local.py,sha256=yedo3R1c46cB1mX7aOz8zORXswQPvX86U-_fYxXoqTY,22717
-mlrun/runtimes/mounts.py,sha256=pGQlnsNTUxAhFMWLS_53E784z-IH9a6oQjKjSp1gbJE,18733
+mlrun/runtimes/mounts.py,sha256=2dkoktm3TXHe4XHmRhvC0UfvWzq2vy_13MeaW7wgyPo,18735
 mlrun/runtimes/pod.py,sha256=kjnDKOQKqfmprzA3tbXhaB58Dp6So4cOApcjYZ3kVko,67691
 mlrun/runtimes/remotesparkjob.py,sha256=dod99nqz3GdRfmnBoQKfwFCXTetfuCScd2pKH3HJyoY,7394
 mlrun/runtimes/utils.py,sha256=3_Vu_OHlhi8f0vh_w9ii2eTKgS5dh6RVi1HwX9oDKuU,15675
@@ -340,11 +341,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
 mlrun/utils/notifications/notification/slack.py,sha256=eQvmctTh6wIG5xVOesLLV9S1-UUCu5UEQ9JIJOor3ts,7183
 mlrun/utils/notifications/notification/webhook.py,sha256=zxh8CAlbPnTazsk6r05X5TKwqUZVOH5KBU2fJbzQlG4,5330
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=uGWC8p0f3GV14zi6vIEuExlx7M_nQbyFGSfe0CccBo0,89
+mlrun/utils/version/version.json,sha256=-oZ5mNoaaByxQfUDsIjBIlipDjs0TE2TBsoWs90b8lk,89
 mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
-mlrun-1.8.0rc55.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
-mlrun-1.8.0rc55.dist-info/METADATA,sha256=yAY1-rwKAxNl2yJh7mfkg4z5rrgQ9XBPWPztSLswSNc,26009
-mlrun-1.8.0rc55.dist-info/WHEEL,sha256=ooBFpIzZCPdw3uqIQsOo4qqbA4ZRPxHnOH7peeONza0,91
-mlrun-1.8.0rc55.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
-mlrun-1.8.0rc55.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
-mlrun-1.8.0rc55.dist-info/RECORD,,
+mlrun-1.8.0rc57.dist-info/licenses/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.8.0rc57.dist-info/METADATA,sha256=yINJv08j-HiR8lfdVnt_vlw_vgLKwgUYnOkgNUd8c6o,25797
+mlrun-1.8.0rc57.dist-info/WHEEL,sha256=0CuiUZ_p9E4cD6NyLD6UG80LBXYyiSYZOKDm5lp32xk,91
+mlrun-1.8.0rc57.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.8.0rc57.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.8.0rc57.dist-info/RECORD,,
mlrun-1.8.0rc55.dist-info/WHEEL → mlrun-1.8.0rc57.dist-info/WHEEL CHANGED
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (80.0.1)
+Generator: setuptools (80.3.1)
 Root-Is-Purelib: true
 Tag: py3-none-any
