mlrun 1.10.0rc40__py3-none-any.whl → 1.11.0rc16__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/__init__.py +3 -2
- mlrun/__main__.py +0 -4
- mlrun/artifacts/dataset.py +2 -2
- mlrun/artifacts/plots.py +1 -1
- mlrun/{model_monitoring/db/tsdb/tdengine → auth}/__init__.py +2 -3
- mlrun/auth/nuclio.py +89 -0
- mlrun/auth/providers.py +429 -0
- mlrun/auth/utils.py +415 -0
- mlrun/common/constants.py +7 -0
- mlrun/common/model_monitoring/helpers.py +41 -4
- mlrun/common/runtimes/constants.py +28 -0
- mlrun/common/schemas/__init__.py +13 -3
- mlrun/common/schemas/alert.py +2 -2
- mlrun/common/schemas/api_gateway.py +3 -0
- mlrun/common/schemas/auth.py +10 -10
- mlrun/common/schemas/client_spec.py +4 -0
- mlrun/common/schemas/constants.py +25 -0
- mlrun/common/schemas/frontend_spec.py +1 -8
- mlrun/common/schemas/function.py +24 -0
- mlrun/common/schemas/hub.py +3 -2
- mlrun/common/schemas/model_monitoring/__init__.py +1 -1
- mlrun/common/schemas/model_monitoring/constants.py +2 -2
- mlrun/common/schemas/secret.py +17 -2
- mlrun/common/secrets.py +95 -1
- mlrun/common/types.py +10 -10
- mlrun/config.py +53 -15
- mlrun/data_types/infer.py +2 -2
- mlrun/datastore/__init__.py +2 -3
- mlrun/datastore/base.py +274 -10
- mlrun/datastore/datastore.py +1 -1
- mlrun/datastore/datastore_profile.py +49 -17
- mlrun/datastore/model_provider/huggingface_provider.py +6 -2
- mlrun/datastore/model_provider/model_provider.py +2 -2
- mlrun/datastore/model_provider/openai_provider.py +2 -2
- mlrun/datastore/s3.py +15 -16
- mlrun/datastore/sources.py +1 -1
- mlrun/datastore/store_resources.py +4 -4
- mlrun/datastore/storeytargets.py +16 -10
- mlrun/datastore/targets.py +1 -1
- mlrun/datastore/utils.py +16 -3
- mlrun/datastore/v3io.py +1 -1
- mlrun/db/base.py +36 -12
- mlrun/db/httpdb.py +316 -101
- mlrun/db/nopdb.py +29 -11
- mlrun/errors.py +4 -2
- mlrun/execution.py +11 -12
- mlrun/feature_store/api.py +1 -1
- mlrun/feature_store/common.py +1 -1
- mlrun/feature_store/feature_vector_utils.py +1 -1
- mlrun/feature_store/steps.py +8 -6
- mlrun/frameworks/_common/utils.py +3 -3
- mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -1
- mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
- mlrun/frameworks/_ml_common/utils.py +2 -1
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +4 -3
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +2 -1
- mlrun/frameworks/onnx/dataset.py +2 -1
- mlrun/frameworks/onnx/mlrun_interface.py +2 -1
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +5 -4
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +2 -1
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +2 -1
- mlrun/frameworks/pytorch/utils.py +2 -1
- mlrun/frameworks/sklearn/metric.py +2 -1
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +5 -4
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +2 -1
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +2 -1
- mlrun/hub/__init__.py +37 -0
- mlrun/hub/base.py +142 -0
- mlrun/hub/module.py +67 -76
- mlrun/hub/step.py +113 -0
- mlrun/launcher/base.py +2 -1
- mlrun/launcher/local.py +2 -1
- mlrun/model.py +12 -2
- mlrun/model_monitoring/__init__.py +0 -1
- mlrun/model_monitoring/api.py +2 -2
- mlrun/model_monitoring/applications/base.py +20 -6
- mlrun/model_monitoring/applications/context.py +1 -0
- mlrun/model_monitoring/controller.py +7 -17
- mlrun/model_monitoring/db/_schedules.py +2 -16
- mlrun/model_monitoring/db/_stats.py +2 -13
- mlrun/model_monitoring/db/tsdb/__init__.py +9 -7
- mlrun/model_monitoring/db/tsdb/base.py +2 -4
- mlrun/model_monitoring/db/tsdb/preaggregate.py +234 -0
- mlrun/model_monitoring/db/tsdb/stream_graph_steps.py +63 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_metrics_queries.py +414 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_predictions_queries.py +376 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_results_queries.py +590 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connection.py +434 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connector.py +541 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_operations.py +808 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_schema.py +502 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream.py +163 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream_graph_steps.py +60 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_dataframe_processor.py +141 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_query_builder.py +585 -0
- mlrun/model_monitoring/db/tsdb/timescaledb/writer_graph_steps.py +73 -0
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +4 -6
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +147 -79
- mlrun/model_monitoring/features_drift_table.py +2 -1
- mlrun/model_monitoring/helpers.py +2 -1
- mlrun/model_monitoring/stream_processing.py +18 -16
- mlrun/model_monitoring/writer.py +4 -3
- mlrun/package/__init__.py +2 -1
- mlrun/platforms/__init__.py +0 -44
- mlrun/platforms/iguazio.py +1 -1
- mlrun/projects/operations.py +11 -10
- mlrun/projects/project.py +81 -82
- mlrun/run.py +4 -7
- mlrun/runtimes/__init__.py +2 -204
- mlrun/runtimes/base.py +89 -21
- mlrun/runtimes/constants.py +225 -0
- mlrun/runtimes/daskjob.py +4 -2
- mlrun/runtimes/databricks_job/databricks_runtime.py +2 -1
- mlrun/runtimes/mounts.py +5 -0
- mlrun/runtimes/nuclio/__init__.py +12 -8
- mlrun/runtimes/nuclio/api_gateway.py +36 -6
- mlrun/runtimes/nuclio/application/application.py +200 -32
- mlrun/runtimes/nuclio/function.py +154 -49
- mlrun/runtimes/nuclio/serving.py +55 -42
- mlrun/runtimes/pod.py +59 -10
- mlrun/secrets.py +46 -2
- mlrun/serving/__init__.py +2 -0
- mlrun/serving/remote.py +5 -5
- mlrun/serving/routers.py +3 -3
- mlrun/serving/server.py +46 -43
- mlrun/serving/serving_wrapper.py +6 -2
- mlrun/serving/states.py +554 -207
- mlrun/serving/steps.py +1 -1
- mlrun/serving/system_steps.py +42 -33
- mlrun/track/trackers/mlflow_tracker.py +29 -31
- mlrun/utils/helpers.py +89 -16
- mlrun/utils/http.py +9 -2
- mlrun/utils/notifications/notification/git.py +1 -1
- mlrun/utils/notifications/notification/mail.py +39 -16
- mlrun/utils/notifications/notification_pusher.py +2 -2
- mlrun/utils/version/version.json +2 -2
- mlrun/utils/version/version.py +3 -4
- {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/METADATA +39 -49
- {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/RECORD +144 -130
- mlrun/db/auth_utils.py +0 -152
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +0 -343
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -75
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +0 -281
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +0 -1368
- mlrun/model_monitoring/db/tsdb/tdengine/writer_graph_steps.py +0 -51
- {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/WHEEL +0 -0
- {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/entry_points.txt +0 -0
- {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/licenses/LICENSE +0 -0
- {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py
CHANGED
@@ -26,11 +26,11 @@ import typing
 import uuid
 import warnings
 import zipfile
+from collections.abc import Callable
 from copy import deepcopy
 from os import environ, makedirs, path
-from typing import
+from typing import Optional, Union, cast

-import deprecated
 import dotenv
 import git
 import git.exc
@@ -45,6 +45,7 @@ import mlrun.common.runtimes.constants
 import mlrun.common.schemas.alert
 import mlrun.common.schemas.artifact
 import mlrun.common.schemas.model_monitoring.constants as mm_constants
+import mlrun.common.secrets
 import mlrun.datastore.datastore_profile
 import mlrun.db
 import mlrun.errors
@@ -166,7 +167,7 @@ def new_project(
 in the project root dir, it will be executed upon project creation or loading.


-
+Example::

 # create a project with local and hub functions, a workflow, and an artifact
 project = mlrun.new_project(
@@ -183,7 +184,7 @@ def new_project(
 # run the "main" workflow (watch=True to wait for run completion)
 project.run("main", watch=True)

-
+Example (load from template)::

 # create a new project from a zip template (can also use yaml/git templates)
 # initialize a local git, and register the git remote path
@@ -197,7 +198,7 @@ def new_project(
 project.run("main", watch=True)


-
+Example using project_setup.py to init the project objects::

 def setup(project):
 project.set_function(
@@ -1281,7 +1282,7 @@ class MlrunProject(ModelObj):
 ) -> str:
 """return the project artifact uri (store://..) from the artifact key

-
+Example::

 uri = project.get_artifact_uri("my_model", category="model", tag="prod", iter=0)

@@ -1459,7 +1460,7 @@ class MlrunProject(ModelObj):
 ):
 """add/set an artifact in the project spec (will be registered on load)

-
+Example::

 # register a simple file artifact
 project.set_artifact("data", target_path=data_url)
@@ -1610,7 +1611,7 @@ class MlrunProject(ModelObj):

 If the artifact already exists with the same key and tag, it will be overwritten.

-
+Example::

 project.log_artifact(
 "some-data",
@@ -1714,7 +1715,7 @@ class MlrunProject(ModelObj):

 If the dataset already exists with the same key and tag, it will be overwritten.

-
+Example::

 raw_data = {
 "first_name": ["Jason", "Molly", "Tina", "Jake", "Amy"],
@@ -1801,7 +1802,7 @@ class MlrunProject(ModelObj):

 If the model already exists with the same key and tag, it will be overwritten.

-
+Example::

 project.log_model(
 "model",
@@ -2043,11 +2044,12 @@ class MlrunProject(ModelObj):
 This wrapper provides both access to the original vector
 store's capabilities and additional MLRun functionality.

-Example
-
-
-
-
+Example::
+
+    vector_store = Chroma(embedding_function=embeddings)
+    collection = project.get_vector_store_collection(
+        vector_store, collection_name="my_collection"
+    )
 """
 return VectorStoreCollection(
 self,
@@ -2098,16 +2100,17 @@ class MlrunProject(ModelObj):
 :param kwargs: Additional keyword arguments
 :return: DocumentArtifact object

-Example
-
-
-
-
-
-
-
-
-
+Example::
+
+    # Log a PDF document with custom loader
+    project.log_document(
+        local_path="path/to/doc.pdf",
+        document_loader=DocumentLoaderSpec(
+            loader_class_name="langchain_community.document_loaders.PDFLoader",
+            src_name="file_path",
+            kwargs={"extract_images": True},
+        ),
+    )

 """
 if not key and not local_path and not target_path:
@@ -2764,9 +2767,9 @@ class MlrunProject(ModelObj):

 Supported URL prefixes:

-
-
-
+| Object (s3://, v3io://, ..)
+| MLRun DB e.g. db://project/func:ver
+| Hub/market: e.g. hub://auto-trainer:master

 Examples::

@@ -3015,20 +3018,6 @@ class MlrunProject(ModelObj):

 self.spec.set_function(name, function_object, func)

-# TODO: Remove this in 1.11.0
-@deprecated.deprecated(
-    version="1.8.0",
-    reason="'remove_function' is deprecated and will be removed in 1.11.0. "
-    "Please use `delete_function` instead.",
-    category=FutureWarning,
-)
-def remove_function(self, name):
-    """remove the specified function from the project
-
-    :param name: name of the function (under the project)
-    """
-    self.spec.remove_function(name)
-
 def delete_function(self, name, delete_from_db=False):
 """deletes the specified function from the project

@@ -3418,7 +3407,12 @@ class MlrunProject(ModelObj):
 self._initialized = True
 return self.spec._function_objects

-def with_secrets(
+def with_secrets(
+    self,
+    kind,
+    source,
+    prefix="",
+):
 """register a secrets source (file, env or dict)

 read secrets from a source provider to be used in workflows, example::
@@ -3440,12 +3434,19 @@ class MlrunProject(ModelObj):

 This will enable access to all secrets in vault registered to the current project.

-:param kind: secret type (file, inline, env, vault)
+:param kind: secret type (file, inline, env, vault, azure_vault)
 :param source: secret data or link (see example)
 :param prefix: add a prefix to the keys in this source

 :returns: project object
 """
+# Block using mlrun-auth-secrets.* via azure_vault's k8s_secret param (client-side only)
+if kind == "azure_vault" and isinstance(source, dict):
+    candidate_secret_name = (source.get("k8s_secret") or "").strip()
+    if candidate_secret_name:
+        mlrun.common.secrets.validate_not_forbidden_secret(
+            candidate_secret_name
+        )

 if kind == "vault" and isinstance(source, list):
 source = {"project": self.metadata.name, "secrets": source}
@@ -3472,7 +3473,7 @@ class MlrunProject(ModelObj):
 when using a secrets file it should have lines in the form KEY=VALUE, comment line start with "#"
 V3IO paths/credentials and MLrun service API address are dropped from the secrets

-
+Example secrets file:

 .. code-block:: shell

@@ -3823,25 +3824,26 @@ class MlrunProject(ModelObj):
 Please note that you have to set the credentials before deploying any model monitoring application
 or a tracked serving function.

-For example, the full flow for enabling model monitoring infrastructure with **
+For example, the full flow for enabling model monitoring infrastructure with **TimescaleDB** and **Kafka**, is:

 .. code-block:: python

 import mlrun
 from mlrun.datastore.datastore_profile import (
 DatastoreProfileKafkaStream,
-
+DatastoreProfilePostgreSQL,
 )

 project = mlrun.get_or_create_project("mm-infra-setup")

 # Create and register TSDB profile
-tsdb_profile =
-name="my-
-host="<
-port=
-user="
-password="<
+tsdb_profile = DatastoreProfilePostgreSQL(
+    name="my-timescaledb",
+    host="<timescaledb-server-ip-address>",
+    port=5432,
+    user="postgres",
+    password="<timescaledb-password>",
+    database="mlrun",
 )
 project.register_datastore_profile(tsdb_profile)

@@ -3912,7 +3914,7 @@ class MlrunProject(ModelObj):
 monitoring. The supported profiles are:

 * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
-* :py:class:`~mlrun.datastore.datastore_profile.
+* :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfilePostgreSQL`

 You need to register one of them, and pass the profile's name.
 :param stream_profile_name: The datastore profile name of the stream to be used in model monitoring.
@@ -4058,7 +4060,7 @@ class MlrunProject(ModelObj):
 ) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
 """Run a local or remote task as part of a local/kubeflow pipeline

-
+Example (use with project)::

 # create a project with two functions (local and from hub)
 project = mlrun.new_project(project_name, "./proj")
@@ -4462,7 +4464,6 @@ class MlrunProject(ModelObj):
 kind: Optional[str] = None,
 category: typing.Union[str, mlrun.common.schemas.ArtifactCategories] = None,
 tree: Optional[str] = None,
-limit: Optional[int] = None,
 format_: Optional[
 mlrun.common.formatters.ArtifactFormat
 ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -4512,7 +4513,6 @@ class MlrunProject(ModelObj):
 :param kind: Return artifacts of the requested kind.
 :param category: Return artifacts of the requested category.
 :param tree: Return artifacts of the requested tree.
-:param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
 :param format_: The format in which to return the artifacts. Default is 'full'.
 :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
 parameter must be provided as well.
@@ -4524,13 +4524,6 @@ class MlrunProject(ModelObj):
 """
 db = mlrun.db.get_run_db(secrets=self._secrets)

-if limit:
-    # TODO: Remove this in 1.11.0
-    warnings.warn(
-        "'limit' is deprecated and will be removed in 1.11.0. Use 'page' and 'page_size' instead.",
-        FutureWarning,
-    )
-
 return db.list_artifacts(
 name,
 self.metadata.name,
@@ -4544,7 +4537,6 @@ class MlrunProject(ModelObj):
 category=category,
 tree=tree,
 format_=format_,
-limit=limit,
 partition_by=partition_by,
 rows_per_partition=rows_per_partition,
 partition_sort_by=partition_sort_by,
@@ -4625,7 +4617,6 @@ class MlrunProject(ModelObj):
 iter: Optional[int] = None,
 best_iteration: bool = False,
 tree: Optional[str] = None,
-limit: Optional[int] = None,
 format_: Optional[
 mlrun.common.formatters.ArtifactFormat
 ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -4659,7 +4650,6 @@ class MlrunProject(ModelObj):
 artifacts generated from a hyper-param run. If only a single iteration exists, will return the artifact
 from that iteration. If using ``best_iter``, the ``iter`` parameter must not be used.
 :param tree: Return artifacts of the requested tree.
-:param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
 :param format_: The format in which to return the artifacts. Default is 'full'.
 """
 db = mlrun.db.get_run_db(secrets=self._secrets)
@@ -4674,7 +4664,6 @@ class MlrunProject(ModelObj):
 best_iteration=best_iteration,
 kind=mlrun.artifacts.model.ModelArtifact.kind,
 tree=tree,
-limit=limit,
 format_=format_,
 ).to_objects()

@@ -4897,7 +4886,7 @@ class MlrunProject(ModelObj):
 ):
 """Retrieve a list of functions, filtered by specific criteria.

-
+Example::

 functions = project.list_functions(tag="latest")

@@ -5035,21 +5024,27 @@ class MlrunProject(ModelObj):
 include_stats: bool = False,
 include_infra: bool = True,
 ) -> list[mlrun.common.schemas.model_monitoring.FunctionSummary]:
-"""
-
-
+"""
+Get monitoring function summaries for the specified project.
+
+:param start: The start time of the monitoring applications’ statistics.
+    If not defined, the default is 24 hours ago.
+    Required timezone, applicable only when `include_stats` is set to True.
+:param end: The end time of the monitoring applications’ statistics.
+    If not defined, the default is now.
+    Required timezone, applicable only when `include_stats` is set to True.
 :param names: List of function names to filter by (optional).
 :param labels: Labels to filter by (optional).
 :param include_stats: Whether to include statistics in the response (default is False).
-:param include_infra:
+:param include_infra: Whether to include model monitoring infrastructure functions (default is True).
+
 :return: A list of FunctionSummary objects containing information about the monitoring functions.
 """

-if start is
-
-
-
-)
+if (start and start.tzinfo is None) or (end and end.tzinfo is None):
+    raise mlrun.errors.MLRunInvalidArgumentError(
+        "Custom start and end times must contain the timezone."
+    )

 db = mlrun.db.get_run_db(secrets=self._secrets)
 return db.get_monitoring_function_summaries(
@@ -5069,10 +5064,14 @@ class MlrunProject(ModelObj):
 end: Optional[datetime.datetime] = None,
 include_latest_metrics: bool = False,
 ) -> mlrun.common.schemas.model_monitoring.FunctionSummary:
-"""
-
-
-:param
+"""
+Get a monitoring function summary for the specified project and function name.
+
+:param name: Name of the monitoring function to retrieve the summary for.
+:param start: The start time of the monitoring applications’ statistics.
+    If not defined, the default is 24 hours ago. Required timezone.
+:param end: The end time of the monitoring applications’ statistics.
+    If not defined, the default is now. Required timezone.
 :param include_latest_metrics: Whether to include the latest metrics in the response (default is False).

 :return: A FunctionSummary object containing information about the monitoring function.
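Taken together, the project.py hunks above remove the deprecated remove_function method and the deprecated limit argument of list_artifacts/list_models, and make get_monitoring_function_summaries reject naive (timezone-less) start/end values. The snippet below is a minimal, hypothetical adaptation sketch rather than an official migration guide: the project name is an example, and the page/page_size pagination arguments are inferred from the removed deprecation message, not confirmed against the released signature.

import datetime

import mlrun

project = mlrun.get_or_create_project("my-project")  # example project name

# remove_function() was removed from MlrunProject; delete_function() remains
project.delete_function("old-func", delete_from_db=False)

# the removed deprecation warning pointed callers of list_artifacts()/list_models()
# from limit= to 'page' and 'page_size' (assumed pagination arguments)
artifacts = project.list_artifacts(page=1, page_size=20)

# get_monitoring_function_summaries() now raises MLRunInvalidArgumentError for
# naive datetimes, so pass timezone-aware start/end values
now = datetime.datetime.now(datetime.timezone.utc)
summaries = project.get_monitoring_function_summaries(
    start=now - datetime.timedelta(hours=6),
    end=now,
    include_stats=True,
)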
mlrun/run.py
CHANGED
@@ -290,7 +290,7 @@ def get_or_create_ctx(
 elif with_env and config:
 newspec = config

-if isinstance(newspec,
+if isinstance(newspec, RunObject | RunTemplate):
 newspec = newspec.to_dict()

 if newspec and not isinstance(newspec, dict):
@@ -345,7 +345,7 @@ def get_or_create_ctx(
 def import_function(url="", secrets=None, db="", project=None, new_name=None):
 """Create function object from DB or local/remote YAML file

-Functions can be imported from function repositories (
+Functions can be imported from function repositories (MLRun Hub) or local db),
 or be read from a remote URL (http(s), s3, git, v3io, ..) containing the function YAML

 special URLs::
@@ -361,7 +361,7 @@ def import_function(url="", secrets=None, db="", project=None, new_name=None):
 "https://raw.githubusercontent.com/org/repo/func.yaml"
 )

-:param url: path/url to
+:param url: path/url to MLRun Hub, db or function YAML file
 :param secrets: optional, credentials dict for DB or URL (s3, v3io, ...)
 :param db: optional, mlrun api/db path
 :param project: optional, target project for the function
@@ -692,7 +692,7 @@ def code_to_function(
 :param description: short function description, defaults to ''
 :param requirements: a list of python packages
 :param requirements_file: path to a python requirements file
-:param categories: list of categories for
+:param categories: list of categories for MLRun Hub, defaults to None
 :param labels: name/value pairs dict to tag the function with useful metadata, defaults to None
 :param with_doc: indicates whether to document the function parameters, defaults to True
 :param ignored_tags: notebook cells to ignore when converting notebooks to py code (separated by ';')
@@ -735,7 +735,6 @@ def code_to_function(
 )

 """
-filebase, _ = path.splitext(path.basename(filename))
 ignored_tags = ignored_tags or mlconf.ignored_notebook_tags

 def add_name(origin, name=""):
@@ -801,8 +800,6 @@ def code_to_function(
 ignored_tags=ignored_tags,
 )

-mlrun.utils.helpers.validate_function_name(name)
-
 spec["spec"]["env"].append(
 {
 "name": "MLRUN_HTTPDB__NUCLIO__EXPLICIT_ACK",
mlrun/runtimes/__init__.py
CHANGED
@@ -26,6 +26,7 @@ __all__ = [
 "KubeResource",
 "ApplicationRuntime",
 "MpiRuntimeV1",
+"RuntimeKinds",
 ]

 import typing
@@ -34,6 +35,7 @@ from mlrun.runtimes.utils import resolve_spark_operator_version

 from ..common.runtimes.constants import MPIJobCRDVersions
 from .base import BaseRuntime, RunError, RuntimeClassMode  # noqa
+from .constants import RuntimeKinds
 from .daskjob import DaskCluster  # noqa
 from .databricks_job.databricks_runtime import DatabricksRuntime
 from .kubejob import KubejobRuntime, KubeResource  # noqa
@@ -94,210 +96,6 @@ def new_model_server(
 )


-class RuntimeKinds:
-    remote = "remote"
-    nuclio = "nuclio"
-    dask = "dask"
-    job = "job"
-    spark = "spark"
-    remotespark = "remote-spark"
-    mpijob = "mpijob"
-    serving = "serving"
-    local = "local"
-    handler = "handler"
-    databricks = "databricks"
-    application = "application"
-
-    @staticmethod
-    def all():
-        return [
-            RuntimeKinds.remote,
-            RuntimeKinds.nuclio,
-            RuntimeKinds.serving,
-            RuntimeKinds.dask,
-            RuntimeKinds.job,
-            RuntimeKinds.spark,
-            RuntimeKinds.remotespark,
-            RuntimeKinds.mpijob,
-            RuntimeKinds.local,
-            RuntimeKinds.databricks,
-            RuntimeKinds.application,
-        ]
-
-    @staticmethod
-    def runtime_with_handlers():
-        return [
-            RuntimeKinds.dask,
-            RuntimeKinds.job,
-            RuntimeKinds.spark,
-            RuntimeKinds.remotespark,
-            RuntimeKinds.mpijob,
-            RuntimeKinds.databricks,
-        ]
-
-    @staticmethod
-    def abortable_runtimes():
-        return [
-            RuntimeKinds.job,
-            RuntimeKinds.spark,
-            RuntimeKinds.remotespark,
-            RuntimeKinds.mpijob,
-            RuntimeKinds.databricks,
-            RuntimeKinds.local,
-            RuntimeKinds.handler,
-            "",
-        ]
-
-    @staticmethod
-    def retriable_runtimes():
-        return [
-            RuntimeKinds.job,
-        ]
-
-    @staticmethod
-    def nuclio_runtimes():
-        return [
-            RuntimeKinds.remote,
-            RuntimeKinds.nuclio,
-            RuntimeKinds.serving,
-            RuntimeKinds.application,
-        ]
-
-    @staticmethod
-    def pure_nuclio_deployed_runtimes():
-        return [
-            RuntimeKinds.remote,
-            RuntimeKinds.nuclio,
-            RuntimeKinds.serving,
-        ]
-
-    @staticmethod
-    def handlerless_runtimes():
-        return [
-            RuntimeKinds.serving,
-            # Application runtime handler is internal reverse proxy
-            RuntimeKinds.application,
-        ]
-
-    @staticmethod
-    def local_runtimes():
-        return [
-            RuntimeKinds.local,
-            RuntimeKinds.handler,
-        ]
-
-    @staticmethod
-    def is_log_collectable_runtime(kind: typing.Optional[str]):
-        """
-        whether log collector can collect logs for that runtime
-        :param kind: kind name
-        :return: whether log collector can collect logs for that runtime
-        """
-        # if local run, the log collector doesn't support it as it is only supports k8s resources
-        # when runtime is local the client is responsible for logging the stdout of the run by using `log_std`
-        if RuntimeKinds.is_local_runtime(kind):
-            return False
-
-        if (
-            kind
-            not in [
-                # dask implementation is different from other runtimes, because few runs can be run against the same
-                # runtime resource, so collecting logs on that runtime resource won't be correct, the way we collect
-                # logs for dask is by using `log_std` on client side after we execute the code against the cluster,
-                # as submitting the run with the dask client will return the run stdout.
-                # For more information head to `DaskCluster._run`.
-                RuntimeKinds.dask
-            ]
-            + RuntimeKinds.nuclio_runtimes()
-        ):
-            return True
-
-        return False
-
-    @staticmethod
-    def is_local_runtime(kind):
-        # "" or None counted as local
-        if not kind or kind in RuntimeKinds.local_runtimes():
-            return True
-        return False
-
-    @staticmethod
-    def requires_k8s_name_validation(kind: str) -> bool:
-        """
-        Returns True if the runtime kind creates Kubernetes resources that use the function name.
-
-        Function names for k8s-deployed runtimes must conform to DNS-1123 label requirements:
-        - Lowercase alphanumeric characters or '-'
-        - Start and end with an alphanumeric character
-        - Maximum 63 characters
-
-        Local runtimes (local, handler) run on the local machine and don't create k8s resources,
-        so they don't require k8s naming validation.
-
-        :param kind: Runtime kind string (job, spark, serving, local, etc.)
-        :return: True if function name needs k8s DNS-1123 validation, False otherwise
-        """
-        return not RuntimeKinds.is_local_runtime(kind)
-
-    @staticmethod
-    def requires_absolute_artifacts_path(kind):
-        """
-        Returns True if the runtime kind requires absolute artifacts' path (i.e. is local), False otherwise.
-        """
-        if RuntimeKinds.is_local_runtime(kind):
-            return False
-
-        if kind not in [
-            # logging artifacts is done externally to the dask cluster by a client that can either run locally (in which
-            # case the path can be relative) or remotely (in which case the path must be absolute and will be passed
-            # to another run)
-            RuntimeKinds.dask
-        ]:
-            return True
-        return False
-
-    @staticmethod
-    def requires_image_name_for_execution(kind):
-        if RuntimeKinds.is_local_runtime(kind):
-            return False
-
-        # both spark and remote spark uses different mechanism for assigning images
-        return kind not in [RuntimeKinds.spark, RuntimeKinds.remotespark]
-
-    @staticmethod
-    def supports_from_notebook(kind):
-        return kind not in [RuntimeKinds.application]
-
-    @staticmethod
-    def resolve_nuclio_runtime(kind: str, sub_kind: str):
-        kind = kind.split(":")[0]
-        if kind not in RuntimeKinds.nuclio_runtimes():
-            raise ValueError(
-                f"Kind {kind} is not a nuclio runtime, available runtimes are {RuntimeKinds.nuclio_runtimes()}"
-            )
-
-        if sub_kind == serving_subkind:
-            return ServingRuntime()
-
-        if kind == RuntimeKinds.application:
-            return ApplicationRuntime()
-
-        runtime = RemoteRuntime()
-        runtime.spec.function_kind = sub_kind
-        return runtime
-
-    @staticmethod
-    def resolve_nuclio_sub_kind(kind):
-        is_nuclio = kind.startswith("nuclio")
-        sub_kind = kind[kind.find(":") + 1 :] if is_nuclio and ":" in kind else None
-        if kind == RuntimeKinds.serving:
-            is_nuclio = True
-            sub_kind = serving_subkind
-        elif kind == RuntimeKinds.application:
-            is_nuclio = True
-        return is_nuclio, sub_kind
-
-
 def get_runtime_class(kind: str):
     if kind == RuntimeKinds.mpijob:
         return MpiRuntimeV1
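The large removal above is the RuntimeKinds class moving out of mlrun/runtimes/__init__.py and into the new mlrun/runtimes/constants.py (listed earlier as +225 lines); the package __init__ re-imports it and adds it to __all__, so the public import path is preserved. A minimal sketch of that expectation, assuming the moved implementation is behaviorally unchanged:

from mlrun.runtimes import RuntimeKinds

assert RuntimeKinds.job == "job"
assert RuntimeKinds.is_local_runtime("") is True     # "" or None count as local
assert RuntimeKinds.is_local_runtime("job") is False
print(RuntimeKinds.nuclio_runtimes())  # ['remote', 'nuclio', 'serving', 'application']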