mlrun 1.10.0rc10__py3-none-any.whl → 1.10.0rc11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun might be problematic.
- mlrun/artifacts/manager.py +1 -1
- mlrun/common/constants.py +11 -0
- mlrun/common/schemas/model_monitoring/__init__.py +2 -0
- mlrun/common/schemas/model_monitoring/functions.py +2 -0
- mlrun/common/schemas/model_monitoring/model_endpoints.py +19 -1
- mlrun/common/schemas/serving.py +1 -0
- mlrun/common/schemas/workflow.py +3 -2
- mlrun/datastore/azure_blob.py +1 -1
- mlrun/datastore/base.py +4 -2
- mlrun/datastore/datastore.py +46 -14
- mlrun/datastore/google_cloud_storage.py +1 -1
- mlrun/datastore/s3.py +16 -5
- mlrun/datastore/sources.py +2 -2
- mlrun/datastore/targets.py +2 -2
- mlrun/db/__init__.py +0 -1
- mlrun/db/base.py +12 -0
- mlrun/db/httpdb.py +35 -0
- mlrun/db/nopdb.py +10 -0
- mlrun/execution.py +12 -0
- mlrun/frameworks/tf_keras/mlrun_interface.py +7 -18
- mlrun/launcher/base.py +1 -0
- mlrun/launcher/client.py +1 -0
- mlrun/launcher/local.py +4 -0
- mlrun/model.py +15 -4
- mlrun/model_monitoring/applications/base.py +74 -56
- mlrun/model_monitoring/db/tsdb/base.py +52 -19
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +179 -11
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +26 -11
- mlrun/model_monitoring/helpers.py +48 -0
- mlrun/projects/pipelines.py +12 -3
- mlrun/projects/project.py +30 -0
- mlrun/runtimes/daskjob.py +2 -0
- mlrun/runtimes/kubejob.py +4 -0
- mlrun/runtimes/mpijob/abstract.py +2 -0
- mlrun/runtimes/mpijob/v1.py +2 -0
- mlrun/runtimes/nuclio/function.py +2 -0
- mlrun/runtimes/nuclio/serving.py +59 -0
- mlrun/runtimes/pod.py +3 -0
- mlrun/runtimes/remotesparkjob.py +2 -0
- mlrun/runtimes/sparkjob/spark3job.py +2 -0
- mlrun/serving/server.py +97 -3
- mlrun/serving/states.py +146 -38
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/METADATA +13 -6
- {mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/RECORD +49 -51
- mlrun/db/sql_types.py +0 -160
- mlrun/utils/db.py +0 -71
- {mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/WHEEL +0 -0
- {mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/entry_points.txt +0 -0
- {mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/licenses/LICENSE +0 -0
- {mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/top_level.txt +0 -0
mlrun/serving/states.py
CHANGED

@@ -20,6 +20,7 @@ __all__ = [
     "MonitoringApplicationStep",
 ]
 
+import inspect
 import os
 import pathlib
 import traceback
@@ -29,6 +30,7 @@ from inspect import getfullargspec, signature
 from typing import Any, Optional, Union, cast
 
 import storey.utils
+from storey import ParallelExecutionMechanisms
 
 import mlrun
 import mlrun.artifacts
@@ -79,6 +81,7 @@ class StepKinds:
     root = "root"
     error_step = "error_step"
     monitoring_application = "monitoring_application"
+    model_runner = "model_runner"
 
 
 _task_step_fields = [
@@ -1002,7 +1005,9 @@ class RouterStep(TaskStep):
         )
 
 
-class Model(storey.ParallelExecutionRunnable):
+class Model(storey.ParallelExecutionRunnable, ModelObj):
+    _dict_fields = ["name", "raise_exception", "artifact_uri"]
+
     def __init__(
         self,
         name: str,
@@ -1015,6 +1020,14 @@ class Model(storey.ParallelExecutionRunnable):
             raise MLRunInvalidArgumentError("'artifact_uri' argument must be a string")
         self.artifact_uri = artifact_uri
 
+    def __init_subclass__(cls):
+        super().__init_subclass__()
+        cls._dict_fields = list(
+            set(cls._dict_fields)
+            | set(inspect.signature(cls.__init__).parameters.keys())
+        )
+        cls._dict_fields.remove("self")
+
     def load(self) -> None:
         """Override to load model if needed."""
         pass
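Taken together, these two hunks make `Model` a serializable `ModelObj`: the new `__init_subclass__` hook folds every parameter of a subclass's `__init__` into `_dict_fields`, so custom constructor arguments survive a `to_dict()` / `from_dict(..., init_with_params=True)` round trip (the same mechanism `init_object` uses further down to rebuild models from stored parameters). A minimal sketch of what this enables; the subclass name, the `threshold` argument, and the trivial `predict` body are illustrative, not part of the package:

    from mlrun.serving.states import Model  # the class shown in the hunks above

    class ScoreThresholdModel(Model):
        def __init__(self, name: str, threshold: float = 0.5):
            super().__init__(name)
            self.threshold = threshold  # picked up into _dict_fields by __init_subclass__

        def predict(self, body):
            # stand-in for real inference logic
            return {"above_threshold": body.get("score", 0.0) > self.threshold}

    model = ScoreThresholdModel(name="my_model", threshold=0.8)
    params = model.to_dict()  # includes "name" and "threshold"
    restored = ScoreThresholdModel.from_dict(params, init_with_params=True)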
@@ -1170,7 +1183,7 @@ class ModelRunnerStep(MonitoredStep):
     example::
 
         model_runner_step = ModelRunnerStep(name="my_model_runner")
-        model_runner_step.add_model(MyModel(name="my_model"))
+        model_runner_step.add_model(..., model_class=MyModel(name="my_model"))
         graph.to(model_runner_step)
 
     :param model_selector: ModelSelector instance whose select() method will be used to select models to run on each
@@ -1203,11 +1216,13 @@ class ModelRunnerStep(MonitoredStep):
             **kwargs,
         )
         self.raise_exception = raise_exception
+        self.shape = "folder"
 
     def add_model(
         self,
         endpoint_name: str,
-        model_class: str,
+        model_class: Union[str, Model],
+        execution_mechanism: Union[str, ParallelExecutionMechanisms],
         model_artifact: Optional[Union[str, mlrun.artifacts.ModelArtifact]] = None,
         labels: Optional[Union[list[str], dict[str, str]]] = None,
         creation_strategy: Optional[
@@ -1225,34 +1240,63 @@ class ModelRunnerStep(MonitoredStep):
 
         :param endpoint_name: str, will identify the model in the ModelRunnerStep, and assign model endpoint name
         :param model_class: Model class name
-        :param ...
+        :param execution_mechanism: Parallel execution mechanism to be used to execute this model. Must be one of:
+            * "process_pool" – To run in a separate process from a process pool. This is appropriate for CPU- or
+              GPU-intensive tasks, as they would otherwise block the main process by holding Python's Global
+              Interpreter Lock (GIL).
+            * "dedicated_process" – To run in a separate dedicated process. This is appropriate for CPU- or
+              GPU-intensive tasks that also require significant Runnable-specific initialization (e.g. a large model).
+            * "thread_pool" – To run in a separate thread. This is appropriate for blocking I/O tasks, as they would
+              otherwise block the main event loop thread.
+            * "asyncio" – To run in an asyncio task. This is appropriate for I/O tasks that use asyncio, allowing the
+              event loop to continue running while waiting for a response.
+            * "shared_executor" – Reuses an external executor (typically managed by the flow or context) to execute
+              the runnable. Should be used only if you have multiple `ParallelExecution` steps in the same flow, and
+              is especially useful when:
+              - You want to share a heavy resource like a large model loaded onto a GPU.
+              - You want to centralize task scheduling or coordination for multiple lightweight tasks.
+              - You aim to minimize overhead from creating new executors or processes/threads per runnable.
+              The runnable is expected to be pre-initialized and reused across events, enabling efficient use of
+              memory and hardware accelerators.
+            * "naive" – To run in the main event loop. This is appropriate only for trivial computation and/or
+              file I/O. It means that the runnable will not actually be run in parallel to anything else.
+
+        :param model_artifact: model artifact or mlrun model artifact uri
+        :param labels: model endpoint labels; should be a list of str or a mapping of str to str
+        :param creation_strategy: Strategy for creating or updating the model endpoint:
+            * **overwrite**:
+                1. If model endpoints with the same name exist, delete the `latest` one.
+                2. Create a new model endpoint entry and set it as `latest`.
+            * **inplace** (default):
+                1. If model endpoints with the same name exist, update the `latest` entry.
+                2. Otherwise, create a new entry.
+            * **archive**:
+                1. If model endpoints with the same name exist, preserve them.
+                2. Create a new model endpoint with the same name and set it to `latest`.
+
+        :param inputs: list of the model inputs (e.g. features); if provided, overrides the inputs configured in
+                       the model artifact. Note that these inputs must match, in length and order, the inputs that
+                       the model_class predict method expects.
+        :param outputs: list of the model outputs (e.g. labels); if provided, overrides the outputs configured in
+                        the model artifact. Note that these outputs must match the model_class predict method
+                        outputs (length and order).
+        :param input_path: input path inside the user event; scopes are defined by dot notation
+                           (e.g. "inputs.my_model_inputs"). Expects a list or dictionary type object in the path.
+        :param result_path: result path inside the user output event; scopes are defined by dot notation
+                            (e.g. "outputs.my_model_outputs"). Expects a list or dictionary type object in the path.
+        :param override: bool, allow overriding an existing model on the current ModelRunnerStep.
+        :param model_parameters: Parameters for model instantiation
         """
-
-
+
+        if isinstance(model_class, Model) and model_parameters:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "Cannot provide a model object as argument to `model_class` and also provide `model_parameters`."
+            )
+
+        model_parameters = model_parameters or (
+            model_class.to_dict() if isinstance(model_class, Model) else {}
+        )
         if outputs is None and isinstance(
             model_artifact, mlrun.artifacts.ModelArtifact
         ):
@@ -1265,7 +1309,9 @@ class ModelRunnerStep(MonitoredStep):
             model_parameters["artifact_uri"] = model_parameters.get(
                 "artifact_uri", model_artifact
             )
-        if model_parameters.get("name", endpoint_name) != endpoint_name:
+        if model_parameters.get("name", endpoint_name) != endpoint_name or (
+            isinstance(model_class, Model) and model_class.name != endpoint_name
+        ):
             raise mlrun.errors.MLRunInvalidArgumentError(
                 "Inconsistent name for model added to ModelRunnerStep."
             )
@@ -1275,11 +1321,26 @@ class ModelRunnerStep(MonitoredStep):
             raise mlrun.errors.MLRunInvalidArgumentError(
                 f"Model with name {endpoint_name} already exists in this ModelRunnerStep."
             )
+        ParallelExecutionMechanisms.validate(execution_mechanism)
+        self.class_args[schemas.ModelRunnerStepData.MODEL_TO_EXECUTION_MECHANISM] = (
+            self.class_args.get(
+                schemas.ModelRunnerStepData.MODEL_TO_EXECUTION_MECHANISM,
+                {},
+            )
+        )
+        self.class_args[schemas.ModelRunnerStepData.MODEL_TO_EXECUTION_MECHANISM][
+            endpoint_name
+        ] = execution_mechanism
 
         model_parameters["name"] = endpoint_name
         monitoring_data = self.class_args.get(
             schemas.ModelRunnerStepData.MONITORING_DATA, {}
         )
+        model_class = (
+            model_class
+            if isinstance(model_class, str)
+            else model_class.__class__.__name__
+        )
         models[endpoint_name] = (model_class, model_parameters)
         monitoring_data[endpoint_name] = {
             schemas.MonitoringData.INPUTS: inputs,
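Pulling the add_model() changes together, below is a hedged usage sketch of the new signature. It mirrors the class docstring example earlier in this file; the endpoint name, model class name, artifact URI, and feature names are placeholders, and `graph` is assumed to be a serving-graph flow topology as in that example:

    from mlrun.serving.states import ModelRunnerStep  # defined in this file

    model_runner_step = ModelRunnerStep(name="my_model_runner")
    model_runner_step.add_model(
        endpoint_name="my_model",
        model_class="MyModel",              # may also be a Model instance (Union[str, Model])
        execution_mechanism="thread_pool",  # checked via ParallelExecutionMechanisms.validate()
        model_artifact="store://models/my-project/my_model:latest",
        inputs=["feature_a", "feature_b"],  # override the artifact's recorded inputs
        outputs=["prediction"],             # override the artifact's recorded outputs
    )
    graph.to(model_runner_step)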
@@ -1347,22 +1408,23 @@ class ModelRunnerStep(MonitoredStep):
 
     def init_object(self, context, namespace, mode="sync", reset=False, **extra_kwargs):
         model_selector = self.class_args.get("model_selector")
+        execution_mechanism_by_model_name = self.class_args.get(
+            schemas.ModelRunnerStepData.MODEL_TO_EXECUTION_MECHANISM
+        )
         models = self.class_args.get(schemas.ModelRunnerStepData.MODELS, {})
         if isinstance(model_selector, str):
             model_selector = get_class(model_selector, namespace)()
         model_objects = []
         for model, model_params in models.values():
-            ...
-            else:
-                # prevent model predict from raising error
-                model._raise_exception = False
+            model = get_class(model, namespace).from_dict(
+                model_params, init_with_params=True
+            )
+            model._raise_exception = False
             model_objects.append(model)
         self._async_object = ModelRunner(
             model_selector=model_selector,
             runnables=model_objects,
+            execution_mechanism_by_runnable_name=execution_mechanism_by_model_name,
             name=self.name,
             context=context,
         )
@@ -2048,6 +2110,7 @@ class RootFlowStep(FlowStep):
         "on_error",
         "model_endpoints_names",
         "model_endpoints_routes_names",
+        "track_models",
     ]
 
     def __init__(
@@ -2067,6 +2130,7 @@ class RootFlowStep(FlowStep):
         )
         self._models = set()
         self._route_models = set()
+        self._track_models = False
 
     @property
     def model_endpoints_names(self) -> list[str]:
@@ -2087,6 +2151,14 @@ class RootFlowStep(FlowStep):
     def model_endpoints_routes_names(self, models: list[str]):
         self._route_models = set(models)
 
+    @property
+    def track_models(self):
+        return self._track_models
+
+    @track_models.setter
+    def track_models(self, track_models: bool):
+        self._track_models = track_models
+
     def update_model_endpoints_routes_names(self, model_endpoints_names: list):
         self._route_models.update(model_endpoints_names)
 
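The `track_models` flag added here is a plain read/write property that RootFlowStep now also serializes (it joins the step field list in the first RootFlowStep hunk above). A minimal sketch, assuming `serving_fn` is a serving function and that `set_topology("flow")` returns the root flow step:

    graph = serving_fn.set_topology("flow")  # root flow step of the serving graph (assumed setup)
    graph.track_models = True                # enable model tracking for this graph
    assert graph.track_models is True        # the flag persists with the step's serialized fields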
@@ -2132,6 +2204,40 @@ def _add_graphviz_router(graph, step, source=None, **kwargs):
         graph.edge(step.fullname, route.fullname)
 
 
+def _add_graphviz_model_runner(graph, step, source=None):
+    if source:
+        graph.node("_start", source.name, shape=source.shape, style="filled")
+        graph.edge("_start", step.fullname)
+
+    is_monitored = step._extract_root_step().track_models
+    m_cell = '<FONT POINT-SIZE="9">🄼</FONT>' if is_monitored else ""
+
+    number_of_models = len(
+        list(step.class_args.get(schemas.ModelRunnerStepData.MODELS, {}).keys())
+    )
+    number_badge = f"""
+    <TABLE BORDER="0" CELLBORDER="0" CELLSPACING="0" BGCOLOR="black" CELLPADDING="2">
+        <TR>
+            <TD><FONT COLOR="white" POINT-SIZE="9"><B>{number_of_models}</B></FONT></TD>
+        </TR>
+    </TABLE>
+    """
+
+    html_label = f"""<
+    <TABLE BORDER="0" CELLBORDER="0" CELLSPACING="0" CELLPADDING="4">
+        <TR>
+            <TD ALIGN="LEFT">{m_cell}</TD>
+            <TD ALIGN="RIGHT">{number_badge}</TD>
+        </TR>
+        <TR>
+            <TD COLSPAN="2" ALIGN="CENTER"><FONT POINT-SIZE="14">{step.name}</FONT></TD>
+        </TR>
+    </TABLE>
+    >"""
+
+    graph.node(step.fullname, label=html_label, shape=step.get_shape())
+
+
 def _add_graphviz_flow(
     graph,
     step,
@@ -2149,6 +2255,8 @@ def _add_graphviz_flow(
         if kind == StepKinds.router:
            with graph.subgraph(name="cluster_" + child.fullname) as sg:
                 _add_graphviz_router(sg, child)
+        elif kind == StepKinds.model_runner:
+            _add_graphviz_model_runner(graph, child)
         else:
             graph.node(child.fullname, label=child.name, shape=child.get_shape())
         _add_edges(child.after or [], step, graph, child)
{mlrun-1.10.0rc10.dist-info → mlrun-1.10.0rc11.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mlrun
-Version: 1.10.0rc10
+Version: 1.10.0rc11
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -44,15 +44,15 @@ Requires-Dist: semver~=3.0
 Requires-Dist: dependency-injector~=4.41
 Requires-Dist: fsspec<2024.7,>=2023.9.2
 Requires-Dist: v3iofs~=0.1.17
-Requires-Dist: storey~=1.10.
+Requires-Dist: storey~=1.10.7
 Requires-Dist: inflection~=0.5.0
 Requires-Dist: python-dotenv~=1.0
 Requires-Dist: setuptools>=75.2
 Requires-Dist: deprecated~=1.2
 Requires-Dist: jinja2>=3.1.6,~=3.1
 Requires-Dist: orjson<4,>=3.9.15
-Requires-Dist: mlrun-pipelines-kfp-common~=0.5.
-Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.
+Requires-Dist: mlrun-pipelines-kfp-common~=0.5.8
+Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7
 Requires-Dist: docstring_parser~=0.16
 Requires-Dist: aiosmtplib~=3.0
 Provides-Extra: s3
@@ -101,6 +101,10 @@ Provides-Extra: tdengine
 Requires-Dist: taos-ws-py==0.3.2; extra == "tdengine"
 Provides-Extra: snowflake
 Requires-Dist: snowflake-connector-python~=3.7; extra == "snowflake"
+Provides-Extra: openai
+Requires-Dist: openai~=1.88; extra == "openai"
+Provides-Extra: dev-postgres
+Requires-Dist: pytest-mock-resources[postgres]~=2.12; extra == "dev-postgres"
 Provides-Extra: kfp18
 Requires-Dist: mlrun_pipelines_kfp_v1_8[kfp]>=0.5.0; python_version < "3.11" and extra == "kfp18"
 Provides-Extra: api
@@ -120,7 +124,7 @@ Requires-Dist: timelength~=1.1; extra == "api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "api"
 Requires-Dist: aiosmtplib~=3.0; extra == "api"
 Requires-Dist: pydantic<2,>=1; extra == "api"
-Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.
+Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7; extra == "api"
 Requires-Dist: grpcio~=1.70.0; extra == "api"
 Provides-Extra: all
 Requires-Dist: adlfs==2023.9.0; extra == "all"
@@ -144,6 +148,7 @@ Requires-Dist: graphviz~=0.20.0; extra == "all"
 Requires-Dist: kafka-python~=2.1.0; extra == "all"
 Requires-Dist: mlflow~=2.22; extra == "all"
 Requires-Dist: msrest~=0.6.21; extra == "all"
+Requires-Dist: openai~=1.88; extra == "all"
 Requires-Dist: oss2==2.18.1; extra == "all"
 Requires-Dist: ossfs==2023.12.0; extra == "all"
 Requires-Dist: plotly~=5.23; extra == "all"
@@ -175,6 +180,7 @@ Requires-Dist: graphviz~=0.20.0; extra == "complete"
 Requires-Dist: kafka-python~=2.1.0; extra == "complete"
 Requires-Dist: mlflow~=2.22; extra == "complete"
 Requires-Dist: msrest~=0.6.21; extra == "complete"
+Requires-Dist: openai~=1.88; extra == "complete"
 Requires-Dist: oss2==2.18.1; extra == "complete"
 Requires-Dist: ossfs==2023.12.0; extra == "complete"
 Requires-Dist: plotly~=5.23; extra == "complete"
@@ -214,9 +220,10 @@ Requires-Dist: igz-mgmt~=0.4.1; extra == "complete-api"
 Requires-Dist: kafka-python~=2.1.0; extra == "complete-api"
 Requires-Dist: memray~=1.12; sys_platform != "win32" and extra == "complete-api"
 Requires-Dist: mlflow~=2.22; extra == "complete-api"
-Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.
+Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7; extra == "complete-api"
 Requires-Dist: msrest~=0.6.21; extra == "complete-api"
 Requires-Dist: objgraph~=3.6; extra == "complete-api"
+Requires-Dist: openai~=1.88; extra == "complete-api"
 Requires-Dist: oss2==2.18.1; extra == "complete-api"
 Requires-Dist: ossfs==2023.12.0; extra == "complete-api"
 Requires-Dist: plotly~=5.23; extra == "complete-api"