mlrun 1.10.0rc24__py3-none-any.whl → 1.10.0rc26__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of mlrun has been flagged as possibly problematic.
- mlrun/artifacts/llm_prompt.py +8 -1
- mlrun/common/model_monitoring/helpers.py +86 -0
- mlrun/common/schemas/hub.py +25 -18
- mlrun/common/schemas/model_monitoring/constants.py +1 -0
- mlrun/common/schemas/model_monitoring/model_endpoints.py +10 -1
- mlrun/config.py +2 -3
- mlrun/datastore/__init__.py +2 -2
- mlrun/datastore/azure_blob.py +66 -43
- mlrun/datastore/datastore_profile.py +35 -5
- mlrun/datastore/model_provider/huggingface_provider.py +122 -30
- mlrun/datastore/model_provider/model_provider.py +62 -4
- mlrun/datastore/model_provider/openai_provider.py +114 -43
- mlrun/datastore/s3.py +24 -2
- mlrun/datastore/storeytargets.py +2 -3
- mlrun/db/base.py +15 -1
- mlrun/db/httpdb.py +17 -6
- mlrun/db/nopdb.py +14 -0
- mlrun/k8s_utils.py +0 -14
- mlrun/model_monitoring/api.py +2 -2
- mlrun/model_monitoring/applications/base.py +37 -10
- mlrun/model_monitoring/applications/context.py +1 -4
- mlrun/model_monitoring/controller.py +15 -5
- mlrun/model_monitoring/db/_schedules.py +2 -4
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +3 -1
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +17 -4
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +3 -0
- mlrun/model_monitoring/helpers.py +5 -5
- mlrun/platforms/iguazio.py +7 -3
- mlrun/projects/project.py +33 -29
- mlrun/runtimes/base.py +0 -3
- mlrun/runtimes/mounts.py +15 -2
- mlrun/runtimes/nuclio/__init__.py +1 -0
- mlrun/runtimes/nuclio/application/application.py +11 -2
- mlrun/runtimes/nuclio/function.py +10 -0
- mlrun/runtimes/nuclio/serving.py +4 -0
- mlrun/runtimes/pod.py +153 -11
- mlrun/runtimes/utils.py +22 -5
- mlrun/serving/routers.py +23 -41
- mlrun/serving/server.py +26 -14
- mlrun/serving/states.py +3 -3
- mlrun/serving/system_steps.py +52 -29
- mlrun/serving/v2_serving.py +9 -10
- mlrun/utils/helpers.py +5 -2
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/METADATA +24 -23
- {mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/RECORD +50 -50
- {mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/WHEEL +0 -0
- {mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/entry_points.txt +0 -0
- {mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/licenses/LICENSE +0 -0
- {mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/top_level.txt +0 -0
mlrun/serving/server.py
CHANGED
@@ -33,9 +33,10 @@ from nuclio import Context as NuclioContext
 from nuclio.request import Logger as NuclioLogger

 import mlrun
-import mlrun.common.constants
 import mlrun.common.helpers
 import mlrun.common.schemas
+import mlrun.common.schemas.model_monitoring.constants as mm_constants
+import mlrun.datastore.datastore_profile as ds_profile
 import mlrun.model_monitoring
 import mlrun.utils
 from mlrun.config import config
@@ -82,7 +83,6 @@ class _StreamContext:
         self.hostname = socket.gethostname()
         self.function_uri = function_uri
         self.output_stream = None
-        stream_uri = None
         log_stream = parameters.get(FileTargetKind.LOG_STREAM, "")

         if (enabled or log_stream) and function_uri:
@@ -93,20 +93,16 @@ class _StreamContext:

             stream_args = parameters.get("stream_args", {})

-            if log_stream == DUMMY_STREAM:
-                # Dummy stream used for testing, see tests/serving/test_serving.py
-                stream_uri = DUMMY_STREAM
-            elif not stream_args.get("mock"):  # if not a mock: `context.is_mock = True`
-                stream_uri = mlrun.model_monitoring.get_stream_path(project=project)
-
             if log_stream:
-                #
-
-                self.output_stream = get_stream_pusher(
+                # Get the output stream from the log stream path
+                stream_path = log_stream.format(project=project)
+                self.output_stream = get_stream_pusher(stream_path, **stream_args)
             else:
                 # Get the output stream from the profile
                 self.output_stream = mlrun.model_monitoring.helpers.get_output_stream(
-                    project=project,
+                    project=project,
+                    profile=parameters.get("stream_profile"),
+                    mock=stream_args.get("mock", False),
                 )
@@ -184,11 +180,12 @@ class GraphServer(ModelObj):
         self,
         context,
         namespace,
-        resource_cache: ResourceCache = None,
+        resource_cache: Optional[ResourceCache] = None,
         logger=None,
         is_mock=False,
         monitoring_mock=False,
-
+        stream_profile: Optional[ds_profile.DatastoreProfile] = None,
+    ) -> None:
         """for internal use, initialize all steps (recursively)"""

         if self.secret_sources:
@@ -203,6 +200,20 @@ class GraphServer(ModelObj):
         context.monitoring_mock = monitoring_mock
         context.root = self.graph

+        if is_mock and monitoring_mock:
+            if stream_profile:
+                # Add the user-defined stream profile to the parameters
+                self.parameters["stream_profile"] = stream_profile
+            elif not (
+                self.parameters.get(FileTargetKind.LOG_STREAM)
+                or mlrun.get_secret_or_env(
+                    mm_constants.ProjectSecretKeys.STREAM_PROFILE_NAME
+                )
+            ):
+                # Set a dummy log stream for mocking purposes if there is no direct
+                # user-defined stream profile and no information in the environment
+                self.parameters[FileTargetKind.LOG_STREAM] = DUMMY_STREAM
+
         context.stream = _StreamContext(
             self.track_models, self.parameters, self.function_uri
         )
@@ -406,6 +417,7 @@ def add_monitoring_general_steps(
         "mlrun.serving.system_steps.BackgroundTaskStatus",
         "background_task_status_step",
         model_endpoint_creation_strategy=mlrun.common.schemas.ModelEndpointCreationStrategy.SKIP,
+        full_event=True,
     )
     monitor_flow_step = graph.add_step(
         "storey.Filter",
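To make the new mock-time precedence easier to follow, here is a minimal, self-contained sketch of the branch added to GraphServer.init_states. The constants and the resolve_mock_stream helper below are illustrative stand-ins, not mlrun API; only the precedence (explicit profile, then an existing log stream or environment hint, then the dummy stream) is taken from the diff above.

# Minimal sketch of the mock-time fallback added to GraphServer.init_states.
# The constants below are placeholders for the real serving DUMMY_STREAM,
# FileTargetKind.LOG_STREAM and ProjectSecretKeys.STREAM_PROFILE_NAME; the
# "parameters" dict stands in for the server's parameters attribute.
import os
from typing import Optional

DUMMY_STREAM = "dummy://"        # placeholder value, not the real constant
LOG_STREAM = "log_stream"        # stands in for FileTargetKind.LOG_STREAM
STREAM_PROFILE_ENV = "MLRUN_STREAM_PROFILE_NAME"  # assumed env-style secret key


def resolve_mock_stream(
    parameters: dict,
    stream_profile: Optional[object],
    is_mock: bool,
    monitoring_mock: bool,
) -> dict:
    """Apply the precedence from the diff: explicit profile, then an existing
    log stream or environment hint, then the dummy stream."""
    if is_mock and monitoring_mock:
        if stream_profile:
            parameters["stream_profile"] = stream_profile
        elif not (parameters.get(LOG_STREAM) or os.getenv(STREAM_PROFILE_ENV)):
            parameters[LOG_STREAM] = DUMMY_STREAM
    return parameters


# No profile and no environment hint -> the dummy log stream is injected
print(resolve_mock_stream({}, None, is_mock=True, monitoring_mock=True))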
mlrun/serving/states.py
CHANGED
@@ -39,7 +39,7 @@ import mlrun.common.schemas as schemas
 from mlrun.artifacts.llm_prompt import LLMPromptArtifact, PlaceholderDefaultDict
 from mlrun.artifacts.model import ModelArtifact
 from mlrun.datastore.datastore_profile import (
-
+    DatastoreProfileKafkaStream,
     DatastoreProfileKafkaTarget,
     DatastoreProfileV3io,
     datastore_profile_read,
@@ -3398,7 +3398,7 @@ def _init_async_objects(context, steps):
                 datastore_profile = datastore_profile_read(stream_path)
                 if isinstance(
                     datastore_profile,
-                    (DatastoreProfileKafkaTarget,
+                    (DatastoreProfileKafkaTarget, DatastoreProfileKafkaStream),
                 ):
                     step._async_object = KafkaStoreyTarget(
                         path=stream_path,
@@ -3414,7 +3414,7 @@ def _init_async_objects(context, steps):
                 else:
                     raise mlrun.errors.MLRunValueError(
                         f"Received an unexpected stream profile type: {type(datastore_profile)}\n"
-                        "Expects `DatastoreProfileV3io` or `
+                        "Expects `DatastoreProfileV3io` or `DatastoreProfileKafkaStream`."
                     )
             elif stream_path.startswith("kafka://") or kafka_brokers:
                 topic, brokers = parse_kafka_url(stream_path, kafka_brokers)
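The practical effect of the widened isinstance check is that a Kafka stream profile of either flavor now routes to the Kafka target. Below is a sketch of that dispatch, using the real profile classes from this release but a stand-in helper and string labels in place of the actual storey target wiring.

# Illustrative dispatch mirroring the updated isinstance check in
# _init_async_objects. The profile classes are the real ones from this release
# (DatastoreProfileKafkaStream is new); choose_stream_target and its labels
# are stand-ins for the actual KafkaStoreyTarget / V3IO stream wiring.
from mlrun.datastore.datastore_profile import (
    DatastoreProfileKafkaStream,
    DatastoreProfileKafkaTarget,
    DatastoreProfileV3io,
)


def choose_stream_target(datastore_profile) -> str:
    if isinstance(
        datastore_profile,
        (DatastoreProfileKafkaTarget, DatastoreProfileKafkaStream),
    ):
        return "kafka"  # mlrun builds a Kafka storey target for these profiles
    if isinstance(datastore_profile, DatastoreProfileV3io):
        return "v3io"   # mlrun builds a V3IO stream target here
    raise ValueError(
        f"Received an unexpected stream profile type: {type(datastore_profile)}\n"
        "Expects `DatastoreProfileV3io` or `DatastoreProfileKafkaStream`."
    )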
mlrun/serving/system_steps.py
CHANGED
@@ -11,7 +11,6 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
 import random
 from copy import copy
 from datetime import timedelta
@@ -25,10 +24,27 @@ import mlrun.artifacts
 import mlrun.common.schemas.model_monitoring as mm_schemas
 import mlrun.feature_store
 import mlrun.serving
+from mlrun.common.model_monitoring.helpers import (
+    get_model_endpoints_creation_task_status,
+)
 from mlrun.common.schemas import MonitoringData
 from mlrun.utils import get_data_from_path, logger


+class MatchingEndpointsState(mlrun.common.types.StrEnum):
+    all_matched = "all_matched"
+    not_all_matched = "not_all_matched"
+    no_check_needed = "no_check_needed"
+    not_yet_checked = "not_yet_matched"
+
+    @staticmethod
+    def success_states() -> list[str]:
+        return [
+            MatchingEndpointsState.all_matched,
+            MatchingEndpointsState.no_check_needed,
+        ]
+
+
 class MonitoringPreProcessor(storey.MapClass):
     """preprocess step, reconstructs the serving output event body to StreamProcessingEvent schema"""

@@ -317,6 +333,9 @@ class BackgroundTaskStatus(storey.MapClass):

     def __init__(self, **kwargs):
         super().__init__(**kwargs)
+        self.matching_endpoints = MatchingEndpointsState.not_yet_checked
+        self.graph_model_endpoint_uids: set = set()
+        self.listed_model_endpoint_uids: set = set()
         self.server: mlrun.serving.GraphServer = (
             getattr(self.context, "server", None) if self.context else None
         )
@@ -337,43 +356,47 @@ class BackgroundTaskStatus(storey.MapClass):
                 )
             )
         ):
-
-            self.
-
-
-            self.
-
+            (
+                self._background_task_state,
+                self._background_task_check_timestamp,
+                self.listed_model_endpoint_uids,
+            ) = get_model_endpoints_creation_task_status(self.server)
+            if (
+                self.listed_model_endpoint_uids
+                and self.matching_endpoints == MatchingEndpointsState.not_yet_checked
+            ):
+                if not self.graph_model_endpoint_uids:
+                    self.graph_model_endpoint_uids = collect_model_endpoint_uids(
+                        self.server
+                    )
+
+                if self.graph_model_endpoint_uids.issubset(self.listed_model_endpoint_uids):
+                    self.matching_endpoints = MatchingEndpointsState.all_matched
+                elif self.listed_model_endpoint_uids is None:
+                    self.matching_endpoints = MatchingEndpointsState.no_check_needed

         if (
             self._background_task_state
             == mlrun.common.schemas.BackgroundTaskState.succeeded
+            and self.matching_endpoints in MatchingEndpointsState.success_states()
         ):
             return event
         else:
             return None

-
-
-
-
-
-
-
-
-
-
-
-
-
-            )
-        else:  # in progress
-            logger.info(
-                f"Model endpoint creation task is still in progress with the current state: "
-                f"{background_task_state}. Events will not be monitored for the next "
-                f"{mlrun.mlconf.model_endpoint_monitoring.model_endpoint_creation_check_period} seconds",
-                name=self.name,
-                background_task_check_timestamp=self._background_task_check_timestamp.isoformat(),
-            )
+
+def collect_model_endpoint_uids(server: mlrun.serving.GraphServer) -> set[str]:
+    """Collects all model endpoint UIDs from the server's graph steps."""
+    model_endpoint_uids = set()
+    for step in server.graph.steps.values():
+        if hasattr(step, "monitoring_data"):
+            for model in step.monitoring_data.keys():
+                uid = step.monitoring_data[model].get(
+                    mlrun.common.schemas.MonitoringData.MODEL_ENDPOINT_UID
+                )
+                if uid:
+                    model_endpoint_uids.add(uid)
+    return model_endpoint_uids


 class SamplingStep(storey.MapClass):
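A reduced, runnable sketch of the new gate: events are forwarded only once the creation task has succeeded and the endpoint UIDs known to the graph are covered by the UIDs reported for the task (or no check is needed). The enum mirrors the one added above, including its values; resolve_matching_state is an illustrative condensation, not the step's actual method.

# Reduced sketch of the per-event bookkeeping in BackgroundTaskStatus,
# condensed into a single pure function for clarity.
from enum import Enum
from typing import Optional


class MatchingEndpointsState(str, Enum):
    all_matched = "all_matched"
    not_all_matched = "not_all_matched"
    no_check_needed = "no_check_needed"
    not_yet_checked = "not_yet_matched"


def resolve_matching_state(
    graph_uids: set, listed_uids: Optional[set]
) -> MatchingEndpointsState:
    """A listed-UIDs value of None means no check is needed; otherwise the
    graph's endpoint UIDs must all appear in the listed UIDs to pass events."""
    if listed_uids is None:
        return MatchingEndpointsState.no_check_needed
    if graph_uids.issubset(listed_uids):
        return MatchingEndpointsState.all_matched
    return MatchingEndpointsState.not_all_matched


print(resolve_matching_state({"a", "b"}, {"a", "b", "c"}))  # all_matched
print(resolve_matching_state({"a", "d"}, {"a", "b"}))       # not_all_matched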
mlrun/serving/v2_serving.py
CHANGED
@@ -24,6 +24,9 @@ import mlrun.common.schemas.model_monitoring
 import mlrun.model_monitoring
 from mlrun.utils import logger, now_date

+from ..common.model_monitoring.helpers import (
+    get_model_endpoints_creation_task_status,
+)
 from .utils import StepToDict, _extract_input_data, _update_result_body


@@ -474,22 +477,18 @@ class V2ModelServer(StepToDict):
         ) or getattr(self.context, "server", None)
         if not self.context.is_mock or self.context.monitoring_mock:
             if server.model_endpoint_creation_task_name:
-
-                    server
-                )
-                logger.debug(
-                    "Checking model endpoint creation task status",
-                    task_name=server.model_endpoint_creation_task_name,
+                background_task_state, _, _ = get_model_endpoints_creation_task_status(
+                    server
                 )
                 if (
-
+                    background_task_state
                     in mlrun.common.schemas.BackgroundTaskState.terminal_states()
                 ):
                     logger.debug(
-                        f"Model endpoint creation task completed with state {
+                        f"Model endpoint creation task completed with state {background_task_state}"
                     )
                     if (
-
+                        background_task_state
                         == mlrun.common.schemas.BackgroundTaskState.succeeded
                     ):
                         self._model_logger = (
@@ -504,7 +503,7 @@
                 else:  # in progress
                     logger.debug(
                         f"Model endpoint creation task is still in progress with the current state: "
-                        f"{
+                        f"{background_task_state}.",
                         name=self.name,
                     )
             else:
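Both v2_serving.py and system_steps.py now share the helper added in mlrun/common/model_monitoring/helpers.py, which returns a (state, check timestamp, listed endpoint UIDs) tuple. Below is a hypothetical wrapper showing how the tuple is consumed when only the state matters, mirroring the pattern in the diff.

# Hypothetical wrapper around the shared status helper introduced in this
# release; V2ModelServer only needs the state, so the check timestamp and the
# listed endpoint UIDs are discarded, as in the diff above.
import mlrun.common.schemas
from mlrun.common.model_monitoring.helpers import (
    get_model_endpoints_creation_task_status,
)


def creation_task_finished(server) -> bool:
    """Return True once the model endpoint creation background task has
    reached a terminal state."""
    background_task_state, _check_timestamp, _listed_uids = (
        get_model_endpoints_creation_task_status(server)
    )
    return (
        background_task_state
        in mlrun.common.schemas.BackgroundTaskState.terminal_states()
    )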
mlrun/utils/helpers.py
CHANGED
@@ -804,7 +804,7 @@ def remove_tag_from_artifact_uri(uri: str) -> Optional[str]:

 def extend_hub_uri_if_needed(uri) -> tuple[str, bool]:
     """
-    Retrieve the full uri of the
+    Retrieve the full uri of the function's yaml in the hub.

     :param uri: structure: "hub://[<source>/]<item-name>[:<tag>]"

@@ -845,7 +845,10 @@ def extend_hub_uri_if_needed(uri) -> tuple[str, bool]:
     # hub function directory name are with underscores instead of hyphens
     name = name.replace("-", "_")
     function_suffix = f"{name}/{tag}/src/function.yaml"
-
+    function_type = mlrun.common.schemas.hub.HubSourceType.functions
+    return indexed_source.source.get_full_uri(
+        function_suffix, function_type
+    ), is_hub_uri


 def gen_md_table(header, rows=None):
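For orientation, a sketch of the new URI composition only: the per-function suffix is now combined with an explicit HubSourceType.functions when building the full URI. build_full_uri and the base URL layout below are illustrative stand-ins for HubSource.get_full_uri, whose implementation is not shown in this diff.

# Sketch of the new suffix/type composition; only the suffix format and the
# source type are taken from the diff, the rest is a stand-in.
import mlrun.common.schemas.hub as hub_schemas

name, tag = "describe", "latest"
function_suffix = f"{name.replace('-', '_')}/{tag}/src/function.yaml"
function_type = hub_schemas.HubSourceType.functions


def build_full_uri(base_uri: str, suffix: str, source_type) -> str:
    # Stand-in: the real HubSource.get_full_uri knows the actual source layout.
    return f"{base_uri.rstrip('/')}/{source_type.value}/{suffix}"


print(build_full_uri("https://example.com/hub", function_suffix, function_type))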
{mlrun-1.10.0rc24.dist-info → mlrun-1.10.0rc26.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: mlrun
-Version: 1.10.0rc24
+Version: 1.10.0rc26
 Summary: Tracking and config of machine learning runs
 Home-page: https://github.com/mlrun/mlrun
 Author: Yaron Haviv
@@ -43,9 +43,9 @@ Requires-Dist: v3io-frames~=0.10.15; python_version < "3.11"
 Requires-Dist: v3io-frames>=0.13.0; python_version >= "3.11"
 Requires-Dist: semver~=3.0
 Requires-Dist: dependency-injector~=4.41
-Requires-Dist: fsspec
+Requires-Dist: fsspec<=2025.7.0,>=2025.5.1
 Requires-Dist: v3iofs~=0.1.17
-Requires-Dist: storey~=1.10.
+Requires-Dist: storey~=1.10.13
 Requires-Dist: inflection~=0.5.0
 Requires-Dist: python-dotenv~=1.0
 Requires-Dist: setuptools>=75.2
@@ -56,14 +56,15 @@ Requires-Dist: mlrun-pipelines-kfp-common~=0.5.8
 Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7
 Requires-Dist: docstring_parser~=0.16
 Requires-Dist: aiosmtplib~=3.0
+Requires-Dist: deepdiff<9.0.0,>=8.6.1
 Provides-Extra: s3
 Requires-Dist: boto3<1.36,>=1.28.0; extra == "s3"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "s3"
-Requires-Dist: s3fs
+Requires-Dist: s3fs<=2025.7.0,>=2025.5.1; extra == "s3"
 Provides-Extra: azure-blob-storage
 Requires-Dist: msrest~=0.6.21; extra == "azure-blob-storage"
 Requires-Dist: azure-core~=1.24; extra == "azure-blob-storage"
-Requires-Dist: adlfs==
+Requires-Dist: adlfs==2024.12.0; extra == "azure-blob-storage"
 Requires-Dist: pyopenssl>=23; extra == "azure-blob-storage"
 Provides-Extra: azure-key-vault
 Requires-Dist: azure-identity~=1.5; extra == "azure-key-vault"
@@ -78,7 +79,7 @@ Requires-Dist: google-cloud-storage==2.14.0; extra == "google-cloud"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "google-cloud"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "google-cloud"
 Requires-Dist: google-cloud==0.34; extra == "google-cloud"
-Requires-Dist: gcsfs
+Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "google-cloud"
 Provides-Extra: kafka
 Requires-Dist: kafka-python~=2.1.0; extra == "kafka"
 Requires-Dist: avro~=1.11; extra == "kafka"
@@ -96,8 +97,8 @@ Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "dask"
 Requires-Dist: dask~=2023.12.1; python_version < "3.11" and extra == "dask"
 Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "dask"
 Provides-Extra: alibaba-oss
-Requires-Dist: ossfs==
-Requires-Dist: oss2==2.18.
+Requires-Dist: ossfs==2025.5.0; extra == "alibaba-oss"
+Requires-Dist: oss2==2.18.4; extra == "alibaba-oss"
 Provides-Extra: tdengine
 Requires-Dist: taos-ws-py==0.3.2; extra == "tdengine"
 Provides-Extra: snowflake
@@ -126,7 +127,7 @@ Requires-Dist: pydantic<2,>=1; extra == "api"
 Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7; extra == "api"
 Requires-Dist: grpcio~=1.70.0; extra == "api"
 Provides-Extra: all
-Requires-Dist: adlfs==
+Requires-Dist: adlfs==2024.12.0; extra == "all"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "all"
 Requires-Dist: avro~=1.11; extra == "all"
 Requires-Dist: azure-core~=1.24; extra == "all"
@@ -138,7 +139,7 @@ Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "all"
 Requires-Dist: databricks-sdk~=0.20.0; extra == "all"
 Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "all"
 Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "all"
-Requires-Dist: gcsfs
+Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "all"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "all"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "all"
 Requires-Dist: google-cloud-storage==2.14.0; extra == "all"
@@ -147,17 +148,17 @@ Requires-Dist: graphviz~=0.20.0; extra == "all"
 Requires-Dist: kafka-python~=2.1.0; extra == "all"
 Requires-Dist: mlflow~=2.22; extra == "all"
 Requires-Dist: msrest~=0.6.21; extra == "all"
-Requires-Dist: oss2==2.18.
-Requires-Dist: ossfs==
+Requires-Dist: oss2==2.18.4; extra == "all"
+Requires-Dist: ossfs==2025.5.0; extra == "all"
 Requires-Dist: plotly~=5.23; extra == "all"
 Requires-Dist: pyopenssl>=23; extra == "all"
 Requires-Dist: redis~=4.3; extra == "all"
-Requires-Dist: s3fs
+Requires-Dist: s3fs<=2025.7.0,>=2025.5.1; extra == "all"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "all"
 Requires-Dist: sqlalchemy~=2.0; extra == "all"
 Requires-Dist: taos-ws-py==0.3.2; extra == "all"
 Provides-Extra: complete
-Requires-Dist: adlfs==
+Requires-Dist: adlfs==2024.12.0; extra == "complete"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete"
 Requires-Dist: avro~=1.11; extra == "complete"
 Requires-Dist: azure-core~=1.24; extra == "complete"
@@ -169,7 +170,7 @@ Requires-Dist: dask~=2024.12.1; python_version >= "3.11" and extra == "complete"
 Requires-Dist: databricks-sdk~=0.20.0; extra == "complete"
 Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "complete"
 Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "complete"
-Requires-Dist: gcsfs
+Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "complete"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "complete"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "complete"
 Requires-Dist: google-cloud-storage==2.14.0; extra == "complete"
@@ -178,17 +179,17 @@ Requires-Dist: graphviz~=0.20.0; extra == "complete"
 Requires-Dist: kafka-python~=2.1.0; extra == "complete"
 Requires-Dist: mlflow~=2.22; extra == "complete"
 Requires-Dist: msrest~=0.6.21; extra == "complete"
-Requires-Dist: oss2==2.18.
-Requires-Dist: ossfs==
+Requires-Dist: oss2==2.18.4; extra == "complete"
+Requires-Dist: ossfs==2025.5.0; extra == "complete"
 Requires-Dist: plotly~=5.23; extra == "complete"
 Requires-Dist: pyopenssl>=23; extra == "complete"
 Requires-Dist: redis~=4.3; extra == "complete"
-Requires-Dist: s3fs
+Requires-Dist: s3fs<=2025.7.0,>=2025.5.1; extra == "complete"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "complete"
 Requires-Dist: sqlalchemy~=2.0; extra == "complete"
 Requires-Dist: taos-ws-py==0.3.2; extra == "complete"
 Provides-Extra: complete-api
-Requires-Dist: adlfs==
+Requires-Dist: adlfs==2024.12.0; extra == "complete-api"
 Requires-Dist: aiobotocore<2.16,>=2.5.0; extra == "complete-api"
 Requires-Dist: aiosmtplib~=3.0; extra == "complete-api"
 Requires-Dist: alembic~=1.14; extra == "complete-api"
@@ -205,7 +206,7 @@ Requires-Dist: databricks-sdk~=0.20.0; extra == "complete-api"
 Requires-Dist: distributed~=2023.12.1; python_version < "3.11" and extra == "complete-api"
 Requires-Dist: distributed~=2024.12.1; python_version >= "3.11" and extra == "complete-api"
 Requires-Dist: fastapi~=0.116.0; extra == "complete-api"
-Requires-Dist: gcsfs
+Requires-Dist: gcsfs<=2025.7.0,>=2025.5.1; extra == "complete-api"
 Requires-Dist: google-cloud-bigquery-storage~=2.17; extra == "complete-api"
 Requires-Dist: google-cloud-bigquery[bqstorage,pandas]==3.14.1; extra == "complete-api"
 Requires-Dist: google-cloud-storage==2.14.0; extra == "complete-api"
@@ -220,15 +221,15 @@ Requires-Dist: mlflow~=2.22; extra == "complete-api"
 Requires-Dist: mlrun-pipelines-kfp-v1-8~=0.5.7; extra == "complete-api"
 Requires-Dist: msrest~=0.6.21; extra == "complete-api"
 Requires-Dist: objgraph~=3.6; extra == "complete-api"
-Requires-Dist: oss2==2.18.
-Requires-Dist: ossfs==
+Requires-Dist: oss2==2.18.4; extra == "complete-api"
+Requires-Dist: ossfs==2025.5.0; extra == "complete-api"
 Requires-Dist: plotly~=5.23; extra == "complete-api"
 Requires-Dist: psycopg2-binary~=2.9; extra == "complete-api"
 Requires-Dist: pydantic<2,>=1; extra == "complete-api"
 Requires-Dist: pymysql~=1.1; extra == "complete-api"
 Requires-Dist: pyopenssl>=23; extra == "complete-api"
 Requires-Dist: redis~=4.3; extra == "complete-api"
-Requires-Dist: s3fs
+Requires-Dist: s3fs<=2025.7.0,>=2025.5.1; extra == "complete-api"
 Requires-Dist: snowflake-connector-python~=3.7; extra == "complete-api"
 Requires-Dist: sqlalchemy-utils~=0.41.2; extra == "complete-api"
 Requires-Dist: sqlalchemy~=2.0; extra == "complete-api"