mlrun 1.10.0rc18__py3-none-any.whl → 1.11.0rc16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Click here for more details.

Files changed (167)
  1. mlrun/__init__.py +24 -3
  2. mlrun/__main__.py +0 -4
  3. mlrun/artifacts/dataset.py +2 -2
  4. mlrun/artifacts/document.py +6 -1
  5. mlrun/artifacts/llm_prompt.py +21 -15
  6. mlrun/artifacts/model.py +3 -3
  7. mlrun/artifacts/plots.py +1 -1
  8. mlrun/{model_monitoring/db/tsdb/tdengine → auth}/__init__.py +2 -3
  9. mlrun/auth/nuclio.py +89 -0
  10. mlrun/auth/providers.py +429 -0
  11. mlrun/auth/utils.py +415 -0
  12. mlrun/common/constants.py +14 -0
  13. mlrun/common/model_monitoring/helpers.py +123 -0
  14. mlrun/common/runtimes/constants.py +28 -0
  15. mlrun/common/schemas/__init__.py +14 -3
  16. mlrun/common/schemas/alert.py +2 -2
  17. mlrun/common/schemas/api_gateway.py +3 -0
  18. mlrun/common/schemas/auth.py +12 -10
  19. mlrun/common/schemas/client_spec.py +4 -0
  20. mlrun/common/schemas/constants.py +25 -0
  21. mlrun/common/schemas/frontend_spec.py +1 -8
  22. mlrun/common/schemas/function.py +34 -0
  23. mlrun/common/schemas/hub.py +33 -20
  24. mlrun/common/schemas/model_monitoring/__init__.py +2 -1
  25. mlrun/common/schemas/model_monitoring/constants.py +12 -15
  26. mlrun/common/schemas/model_monitoring/functions.py +13 -4
  27. mlrun/common/schemas/model_monitoring/model_endpoints.py +11 -0
  28. mlrun/common/schemas/pipeline.py +1 -1
  29. mlrun/common/schemas/secret.py +17 -2
  30. mlrun/common/secrets.py +95 -1
  31. mlrun/common/types.py +10 -10
  32. mlrun/config.py +69 -19
  33. mlrun/data_types/infer.py +2 -2
  34. mlrun/datastore/__init__.py +12 -5
  35. mlrun/datastore/azure_blob.py +162 -47
  36. mlrun/datastore/base.py +274 -10
  37. mlrun/datastore/datastore.py +7 -2
  38. mlrun/datastore/datastore_profile.py +84 -22
  39. mlrun/datastore/model_provider/huggingface_provider.py +225 -41
  40. mlrun/datastore/model_provider/mock_model_provider.py +87 -0
  41. mlrun/datastore/model_provider/model_provider.py +206 -74
  42. mlrun/datastore/model_provider/openai_provider.py +226 -66
  43. mlrun/datastore/s3.py +39 -18
  44. mlrun/datastore/sources.py +1 -1
  45. mlrun/datastore/store_resources.py +4 -4
  46. mlrun/datastore/storeytargets.py +17 -12
  47. mlrun/datastore/targets.py +1 -1
  48. mlrun/datastore/utils.py +25 -6
  49. mlrun/datastore/v3io.py +1 -1
  50. mlrun/db/base.py +63 -32
  51. mlrun/db/httpdb.py +373 -153
  52. mlrun/db/nopdb.py +54 -21
  53. mlrun/errors.py +4 -2
  54. mlrun/execution.py +66 -25
  55. mlrun/feature_store/api.py +1 -1
  56. mlrun/feature_store/common.py +1 -1
  57. mlrun/feature_store/feature_vector_utils.py +1 -1
  58. mlrun/feature_store/steps.py +8 -6
  59. mlrun/frameworks/_common/utils.py +3 -3
  60. mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
  61. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -1
  62. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
  63. mlrun/frameworks/_ml_common/utils.py +2 -1
  64. mlrun/frameworks/auto_mlrun/auto_mlrun.py +4 -3
  65. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +2 -1
  66. mlrun/frameworks/onnx/dataset.py +2 -1
  67. mlrun/frameworks/onnx/mlrun_interface.py +2 -1
  68. mlrun/frameworks/pytorch/callbacks/logging_callback.py +5 -4
  69. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +2 -1
  70. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +2 -1
  71. mlrun/frameworks/pytorch/utils.py +2 -1
  72. mlrun/frameworks/sklearn/metric.py +2 -1
  73. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +5 -4
  74. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +2 -1
  75. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +2 -1
  76. mlrun/hub/__init__.py +52 -0
  77. mlrun/hub/base.py +142 -0
  78. mlrun/hub/module.py +172 -0
  79. mlrun/hub/step.py +113 -0
  80. mlrun/k8s_utils.py +105 -16
  81. mlrun/launcher/base.py +15 -7
  82. mlrun/launcher/local.py +4 -1
  83. mlrun/model.py +14 -4
  84. mlrun/model_monitoring/__init__.py +0 -1
  85. mlrun/model_monitoring/api.py +65 -28
  86. mlrun/model_monitoring/applications/__init__.py +1 -1
  87. mlrun/model_monitoring/applications/base.py +299 -128
  88. mlrun/model_monitoring/applications/context.py +2 -4
  89. mlrun/model_monitoring/controller.py +132 -58
  90. mlrun/model_monitoring/db/_schedules.py +38 -29
  91. mlrun/model_monitoring/db/_stats.py +6 -16
  92. mlrun/model_monitoring/db/tsdb/__init__.py +9 -7
  93. mlrun/model_monitoring/db/tsdb/base.py +29 -9
  94. mlrun/model_monitoring/db/tsdb/preaggregate.py +234 -0
  95. mlrun/model_monitoring/db/tsdb/stream_graph_steps.py +63 -0
  96. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_metrics_queries.py +414 -0
  97. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_predictions_queries.py +376 -0
  98. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_results_queries.py +590 -0
  99. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connection.py +434 -0
  100. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connector.py +541 -0
  101. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_operations.py +808 -0
  102. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_schema.py +502 -0
  103. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream.py +163 -0
  104. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream_graph_steps.py +60 -0
  105. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_dataframe_processor.py +141 -0
  106. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_query_builder.py +585 -0
  107. mlrun/model_monitoring/db/tsdb/timescaledb/writer_graph_steps.py +73 -0
  108. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +20 -9
  109. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +235 -51
  110. mlrun/model_monitoring/features_drift_table.py +2 -1
  111. mlrun/model_monitoring/helpers.py +30 -6
  112. mlrun/model_monitoring/stream_processing.py +34 -28
  113. mlrun/model_monitoring/writer.py +224 -4
  114. mlrun/package/__init__.py +2 -1
  115. mlrun/platforms/__init__.py +0 -43
  116. mlrun/platforms/iguazio.py +8 -4
  117. mlrun/projects/operations.py +17 -11
  118. mlrun/projects/pipelines.py +2 -2
  119. mlrun/projects/project.py +187 -123
  120. mlrun/run.py +95 -21
  121. mlrun/runtimes/__init__.py +2 -186
  122. mlrun/runtimes/base.py +103 -25
  123. mlrun/runtimes/constants.py +225 -0
  124. mlrun/runtimes/daskjob.py +5 -2
  125. mlrun/runtimes/databricks_job/databricks_runtime.py +2 -1
  126. mlrun/runtimes/local.py +5 -2
  127. mlrun/runtimes/mounts.py +20 -2
  128. mlrun/runtimes/nuclio/__init__.py +12 -7
  129. mlrun/runtimes/nuclio/api_gateway.py +36 -6
  130. mlrun/runtimes/nuclio/application/application.py +339 -40
  131. mlrun/runtimes/nuclio/function.py +222 -72
  132. mlrun/runtimes/nuclio/serving.py +132 -42
  133. mlrun/runtimes/pod.py +213 -21
  134. mlrun/runtimes/utils.py +49 -9
  135. mlrun/secrets.py +99 -14
  136. mlrun/serving/__init__.py +2 -0
  137. mlrun/serving/remote.py +84 -11
  138. mlrun/serving/routers.py +26 -44
  139. mlrun/serving/server.py +138 -51
  140. mlrun/serving/serving_wrapper.py +6 -2
  141. mlrun/serving/states.py +997 -283
  142. mlrun/serving/steps.py +62 -0
  143. mlrun/serving/system_steps.py +149 -95
  144. mlrun/serving/v2_serving.py +9 -10
  145. mlrun/track/trackers/mlflow_tracker.py +29 -31
  146. mlrun/utils/helpers.py +292 -94
  147. mlrun/utils/http.py +9 -2
  148. mlrun/utils/notifications/notification/base.py +18 -0
  149. mlrun/utils/notifications/notification/git.py +3 -5
  150. mlrun/utils/notifications/notification/mail.py +39 -16
  151. mlrun/utils/notifications/notification/slack.py +2 -4
  152. mlrun/utils/notifications/notification/webhook.py +2 -5
  153. mlrun/utils/notifications/notification_pusher.py +3 -3
  154. mlrun/utils/version/version.json +2 -2
  155. mlrun/utils/version/version.py +3 -4
  156. {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/METADATA +63 -74
  157. {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/RECORD +161 -143
  158. mlrun/api/schemas/__init__.py +0 -259
  159. mlrun/db/auth_utils.py +0 -152
  160. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +0 -344
  161. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -75
  162. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +0 -281
  163. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +0 -1266
  164. {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/WHEEL +0 -0
  165. {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/entry_points.txt +0 -0
  166. {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/licenses/LICENSE +0 -0
  167. {mlrun-1.10.0rc18.dist-info → mlrun-1.11.0rc16.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -26,11 +26,11 @@ import typing
26
26
  import uuid
27
27
  import warnings
28
28
  import zipfile
29
+ from collections.abc import Callable
29
30
  from copy import deepcopy
30
31
  from os import environ, makedirs, path
31
- from typing import Callable, Optional, Union, cast
32
+ from typing import Optional, Union, cast
32
33
 
33
- import deprecated
34
34
  import dotenv
35
35
  import git
36
36
  import git.exc
@@ -45,6 +45,7 @@ import mlrun.common.runtimes.constants
45
45
  import mlrun.common.schemas.alert
46
46
  import mlrun.common.schemas.artifact
47
47
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
48
+ import mlrun.common.secrets
48
49
  import mlrun.datastore.datastore_profile
49
50
  import mlrun.db
50
51
  import mlrun.errors
@@ -166,7 +167,7 @@ def new_project(
166
167
  in the project root dir, it will be executed upon project creation or loading.
167
168
 
168
169
 
169
- example::
170
+ Example::
170
171
 
171
172
  # create a project with local and hub functions, a workflow, and an artifact
172
173
  project = mlrun.new_project(
@@ -183,7 +184,7 @@ def new_project(
183
184
  # run the "main" workflow (watch=True to wait for run completion)
184
185
  project.run("main", watch=True)
185
186
 
186
- example (load from template)::
187
+ Example (load from template)::
187
188
 
188
189
  # create a new project from a zip template (can also use yaml/git templates)
189
190
  # initialize a local git, and register the git remote path
@@ -197,7 +198,7 @@ def new_project(
197
198
  project.run("main", watch=True)
198
199
 
199
200
 
200
- example using project_setup.py to init the project objects::
201
+ Example using project_setup.py to init the project objects::
201
202
 
202
203
  def setup(project):
203
204
  project.set_function(
@@ -1281,7 +1282,7 @@ class MlrunProject(ModelObj):
1281
1282
  ) -> str:
1282
1283
  """return the project artifact uri (store://..) from the artifact key
1283
1284
 
1284
- example::
1285
+ Example::
1285
1286
 
1286
1287
  uri = project.get_artifact_uri("my_model", category="model", tag="prod", iter=0)
1287
1288
 
@@ -1459,7 +1460,7 @@ class MlrunProject(ModelObj):
1459
1460
  ):
1460
1461
  """add/set an artifact in the project spec (will be registered on load)
1461
1462
 
1462
- example::
1463
+ Example::
1463
1464
 
1464
1465
  # register a simple file artifact
1465
1466
  project.set_artifact("data", target_path=data_url)
@@ -1610,7 +1611,7 @@ class MlrunProject(ModelObj):
1610
1611
 
1611
1612
  If the artifact already exists with the same key and tag, it will be overwritten.
1612
1613
 
1613
- example::
1614
+ Example::
1614
1615
 
1615
1616
  project.log_artifact(
1616
1617
  "some-data",
@@ -1714,7 +1715,7 @@ class MlrunProject(ModelObj):
1714
1715
 
1715
1716
  If the dataset already exists with the same key and tag, it will be overwritten.
1716
1717
 
1717
- example::
1718
+ Example::
1718
1719
 
1719
1720
  raw_data = {
1720
1721
  "first_name": ["Jason", "Molly", "Tina", "Jake", "Amy"],
@@ -1801,7 +1802,7 @@ class MlrunProject(ModelObj):
1801
1802
 
1802
1803
  If the model already exists with the same key and tag, it will be overwritten.
1803
1804
 
1804
- example::
1805
+ Example::
1805
1806
 
1806
1807
  project.log_model(
1807
1808
  "model",
@@ -1888,7 +1889,7 @@ class MlrunProject(ModelObj):
1888
1889
  prompt_path: Optional[str] = None,
1889
1890
  prompt_legend: Optional[dict] = None,
1890
1891
  model_artifact: Union[ModelArtifact, str] = None,
1891
- model_configuration: Optional[dict] = None,
1892
+ invocation_config: Optional[dict] = None,
1892
1893
  description: Optional[str] = None,
1893
1894
  target_path: Optional[str] = None,
1894
1895
  artifact_path: Optional[str] = None,
@@ -1908,13 +1909,51 @@ class MlrunProject(ModelObj):
1908
1909
 
1909
1910
  Examples::
1910
1911
 
1912
+ # Log directly with an inline prompt template
1913
+ project.log_llm_prompt(
1914
+ key="customer_support_prompt",
1915
+ prompt_template=[
1916
+ {
1917
+ "role": "system",
1918
+ "content": "You are a helpful customer support assistant.",
1919
+ },
1920
+ {
1921
+ "role": "user",
1922
+ "content": "The customer reports: {issue_description}",
1923
+ },
1924
+ ],
1925
+ prompt_legend={
1926
+ "issue_description": {
1927
+ "field": "user_issue",
1928
+ "description": "Detailed description of the customer's issue",
1929
+ },
1930
+ "solution": {
1931
+ "field": "proposed_solution",
1932
+ "description": "Suggested fix for the customer's issue",
1933
+ },
1934
+ },
1935
+ model_artifact=model,
1936
+ invocation_config={"temperature": 0.5, "max_tokens": 200},
1937
+ description="Prompt for handling customer support queries",
1938
+ tag="support-v1",
1939
+ labels={"domain": "support"},
1940
+ )
1941
+
1911
1942
  # Log a prompt from file
1912
1943
  project.log_llm_prompt(
1913
- key="qa-prompt",
1914
- prompt_path="prompts/qa_template.txt",
1915
- prompt_legend={"question": "user_question"},
1944
+ key="qa_prompt",
1945
+ prompt_path="prompts/template.json",
1946
+ prompt_legend={
1947
+ "question": {
1948
+ "field": "user_question",
1949
+ "description": "The actual question asked by the user",
1950
+ }
1951
+ },
1916
1952
  model_artifact=model,
1953
+ invocation_config={"temperature": 0.7, "max_tokens": 256},
1954
+ description="Q&A prompt template with user-provided question",
1917
1955
  tag="v2",
1956
+ labels={"task": "qa", "stage": "experiment"},
1918
1957
  )
1919
1958
 
1920
1959
  :param key: Unique key for the prompt artifact.
@@ -1923,18 +1962,23 @@ class MlrunProject(ModelObj):
1923
1962
  "role": "user", "content": "I need your help with {profession}"]. only "role" and "content" keys allow in any
1924
1963
  str format (upper/lower case), keys will be modified to lower case.
1925
1964
  Cannot be used with `prompt_path`.
1926
- :param prompt_path: Path to a file containing the prompt. Mutually exclusive with `prompt_string`.
1965
+ :param prompt_path: Path to a JSON file containing the prompt template.
1966
+ Cannot be used together with `prompt_template`.
1967
+ The file should define a list of dictionaries in the same format
1968
+ supported by `prompt_template`.
1927
1969
  :param prompt_legend: A dictionary where each key is a placeholder in the prompt (e.g., ``{user_name}``)
1928
1970
  and the value is a dictionary holding two keys, "field", "description". "field" points to the field in
1929
1971
  the event where the value of the place-holder inside the event, if None or not exist will be replaced
1930
1972
  with the place-holder name. "description" will point to explanation of what that placeholder represents.
1931
1973
  Useful for documenting and clarifying dynamic parts of the prompt.
1932
1974
  :param model_artifact: Reference to the parent model (either `ModelArtifact` or model URI string).
1933
- :param model_configuration: Configuration dictionary for model generation parameters
1975
+ :param invocation_config: Configuration dictionary for model generation parameters
1934
1976
  (e.g., temperature, max tokens).
1935
- :param description: Optional description of the prompt.
1936
- :param target_path: Optional local target path for saving prompt content.
1937
- :param artifact_path: Storage path for the logged artifact.
1977
+ :param description: Optional description of the prompt.
1978
+ :param target_path: Absolute target path (instead of using artifact_path + local_path)
1979
+ :param artifact_path: Target artifact path (when not using the default)
1980
+ To define a subpath under the default location use:
1981
+ `artifact_path=context.artifact_subpath('data')`
1938
1982
  :param tag: Version tag for the artifact (e.g., "v1", "latest").
1939
1983
  :param labels: Labels to tag the artifact for filtering and organization.
1940
1984
  :param upload: Whether to upload the artifact to a remote datastore. Defaults to True.
@@ -1955,7 +1999,7 @@ class MlrunProject(ModelObj):
1955
1999
  prompt_path=prompt_path,
1956
2000
  prompt_legend=prompt_legend,
1957
2001
  model_artifact=model_artifact,
1958
- model_configuration=model_configuration,
2002
+ invocation_config=invocation_config,
1959
2003
  target_path=target_path,
1960
2004
  description=description,
1961
2005
  **kwargs,
@@ -2000,11 +2044,12 @@ class MlrunProject(ModelObj):
2000
2044
  This wrapper provides both access to the original vector
2001
2045
  store's capabilities and additional MLRun functionality.
2002
2046
 
2003
- Example:
2004
- >>> vector_store = Chroma(embedding_function=embeddings)
2005
- >>> collection = project.get_vector_store_collection(
2006
- ... vector_store, collection_name="my_collection"
2007
- ... )
2047
+ Example::
2048
+
2049
+ vector_store = Chroma(embedding_function=embeddings)
2050
+ collection = project.get_vector_store_collection(
2051
+ vector_store, collection_name="my_collection"
2052
+ )
2008
2053
  """
2009
2054
  return VectorStoreCollection(
2010
2055
  self,
@@ -2055,16 +2100,17 @@ class MlrunProject(ModelObj):
2055
2100
  :param kwargs: Additional keyword arguments
2056
2101
  :return: DocumentArtifact object
2057
2102
 
2058
- Example:
2059
- >>> # Log a PDF document with custom loader
2060
- >>> project.log_document(
2061
- ... local_path="path/to/doc.pdf",
2062
- ... document_loader=DocumentLoaderSpec(
2063
- ... loader_class_name="langchain_community.document_loaders.PDFLoader",
2064
- ... src_name="file_path",
2065
- ... kwargs={"extract_images": True},
2066
- ... ),
2067
- ... )
2103
+ Example::
2104
+
2105
+ # Log a PDF document with custom loader
2106
+ project.log_document(
2107
+ local_path="path/to/doc.pdf",
2108
+ document_loader=DocumentLoaderSpec(
2109
+ loader_class_name="langchain_community.document_loaders.PDFLoader",
2110
+ src_name="file_path",
2111
+ kwargs={"extract_images": True},
2112
+ ),
2113
+ )
2068
2114
 
2069
2115
  """
2070
2116
  if not key and not local_path and not target_path:
@@ -2343,8 +2389,9 @@ class MlrunProject(ModelObj):
2343
2389
  handler: Optional[str] = None,
2344
2390
  with_repo: Optional[bool] = None,
2345
2391
  tag: Optional[str] = None,
2346
- requirements: Optional[typing.Union[str, list[str]]] = None,
2392
+ requirements: Optional[list[str]] = None,
2347
2393
  requirements_file: str = "",
2394
+ local_path: Optional[str] = None,
2348
2395
  **application_kwargs,
2349
2396
  ) -> mlrun.runtimes.RemoteRuntime:
2350
2397
  """
@@ -2359,7 +2406,8 @@ class MlrunProject(ModelObj):
2359
2406
  )
2360
2407
 
2361
2408
  :param func: Remote function object or spec/code URL. :code:`None` refers to the current
2362
- notebook.
2409
+ notebook. May also be a hub URL of a module of kind model-monitoring-app in the
2410
+ format: hub://[{source}/]{name}[:{tag}].
2363
2411
  :param name: Name of the function (under the project), can be specified with a tag to support
2364
2412
  versions (e.g. myfunc:v1).
2365
2413
  :param image: Docker image to be used, can also be specified in
@@ -2374,6 +2422,8 @@ class MlrunProject(ModelObj):
2374
2422
  :param application_class: Name or an Instance of a class that implements the monitoring application.
2375
2423
  :param application_kwargs: Additional keyword arguments to be passed to the
2376
2424
  monitoring application's constructor.
2425
+ :param local_path: Path to a local directory to save the downloaded monitoring-app code files in,
2426
+ in case 'func' is a hub URL (defaults to current working directory).
2377
2427
  :returns: The model monitoring remote function object.
2378
2428
  """
2379
2429
  (
@@ -2390,6 +2440,7 @@ class MlrunProject(ModelObj):
2390
2440
  tag,
2391
2441
  requirements,
2392
2442
  requirements_file,
2443
+ local_path,
2393
2444
  **application_kwargs,
2394
2445
  )
2395
2446
  # save to project spec
@@ -2468,8 +2519,9 @@ class MlrunProject(ModelObj):
2468
2519
  handler: typing.Optional[str] = None,
2469
2520
  with_repo: typing.Optional[bool] = None,
2470
2521
  tag: typing.Optional[str] = None,
2471
- requirements: typing.Union[str, list[str], None] = None,
2522
+ requirements: typing.Union[list[str], None] = None,
2472
2523
  requirements_file: str = "",
2524
+ local_path: typing.Optional[str] = None,
2473
2525
  **application_kwargs,
2474
2526
  ) -> tuple[str, mlrun.runtimes.RemoteRuntime, dict]:
2475
2527
  import mlrun.model_monitoring.api
@@ -2486,6 +2538,7 @@ class MlrunProject(ModelObj):
2486
2538
  tag=tag,
2487
2539
  requirements=requirements,
2488
2540
  requirements_file=requirements_file,
2541
+ local_path=local_path,
2489
2542
  **application_kwargs,
2490
2543
  )
2491
2544
  elif isinstance(func, str) and isinstance(handler, str):
@@ -2531,7 +2584,7 @@ class MlrunProject(ModelObj):
2531
2584
  *,
2532
2585
  deploy_histogram_data_drift_app: bool = True,
2533
2586
  wait_for_deployment: bool = False,
2534
- fetch_credentials_from_sys_config: bool = False,
2587
+ fetch_credentials_from_sys_config: bool = False, # deprecated
2535
2588
  ) -> None:
2536
2589
  """
2537
2590
  Deploy model monitoring application controller, writer and stream functions.
@@ -2566,14 +2619,20 @@ class MlrunProject(ModelObj):
2566
2619
  :param wait_for_deployment: If true, return only after the deployment is done on the backend.
2567
2620
  Otherwise, deploy the model monitoring infrastructure on the
2568
2621
  background, including the histogram data drift app if selected.
2569
- :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
2622
+ :param fetch_credentials_from_sys_config: Deprecated. If true, fetch the credentials from the project
2623
+ configuration.
2570
2624
  """
2625
+ if fetch_credentials_from_sys_config:
2626
+ warnings.warn(
2627
+ "`fetch_credentials_from_sys_config` is deprecated in 1.10.0 and will be removed in 1.12.0.",
2628
+ # TODO: Remove this in 1.12.0
2629
+ FutureWarning,
2630
+ )
2571
2631
  if base_period < 10:
2572
2632
  logger.warn(
2573
2633
  "enable_model_monitoring: 'base_period' < 10 minutes is not supported in production environments",
2574
2634
  project=self.name,
2575
2635
  )
2576
-
2577
2636
  db = mlrun.db.get_run_db(secrets=self._secrets)
2578
2637
  db.enable_model_monitoring(
2579
2638
  project=self.name,
@@ -2706,16 +2765,18 @@ class MlrunProject(ModelObj):
2706
2765
  | Creating a function with non project source is done by specifying a module ``handler`` and on the
2707
2766
  returned function set the source with ``function.with_source_archive(<source>)``.
2708
2767
 
2709
- Support URL prefixes:
2768
+ Supported URL prefixes:
2710
2769
 
2711
2770
  | Object (s3://, v3io://, ..)
2712
2771
  | MLRun DB e.g. db://project/func:ver
2713
- | Functions hub/market: e.g. hub://auto-trainer:master
2772
+ | Hub/market: e.g. hub://auto-trainer:master
2714
2773
 
2715
2774
  Examples::
2716
2775
 
2717
2776
  proj.set_function(func_object)
2718
- proj.set_function("http://.../mynb.ipynb", "train")
2777
+ proj.set_function(
2778
+ "http://.../mynb.ipynb", "train", kind="job", image="mlrun/mlrun"
2779
+ )
2719
2780
  proj.set_function("./func.yaml")
2720
2781
  proj.set_function("hub://get_toy_data", "getdata")
2721
2782
 
@@ -2742,18 +2803,6 @@ class MlrunProject(ModelObj):
2742
2803
  # By providing a path to a pip requirements file
2743
2804
  proj.set_function("my.py", requirements="requirements.txt")
2744
2805
 
2745
- One of the most important parameters is 'kind', used to specify the chosen runtime. The options are:
2746
- - local: execute a local python or shell script
2747
- - job: insert the code into a Kubernetes pod and execute it
2748
- - nuclio: insert the code into a real-time serverless nuclio function
2749
- - serving: insert code into orchestrated nuclio function(s) forming a DAG
2750
- - dask: run the specified python code / script as Dask Distributed job
2751
- - mpijob: run distributed Horovod jobs over the MPI job operator
2752
- - spark: run distributed Spark job using Spark Kubernetes Operator
2753
- - remote-spark: run distributed Spark job on remote Spark service
2754
- - databricks: run code on Databricks cluster (python scripts, Spark etc.)
2755
- - application: run a long living application (e.g. a web server, UI, etc.)
2756
-
2757
2806
  Learn more about :doc:`../../concepts/functions-overview`.
2758
2807
 
2759
2808
  :param func: Function object or spec/code url, None refers to current Notebook
@@ -2761,8 +2810,20 @@ class MlrunProject(ModelObj):
2761
2810
  Versions (e.g. myfunc:v1). If the `tag` parameter is provided, the tag in the name
2762
2811
  must match the tag parameter.
2763
2812
  Specifying a tag in the name will update the project's tagged function (myfunc:v1)
2764
- :param kind: Runtime kind e.g. job, nuclio, spark, dask, mpijob
2765
- Default: job
2813
+ :param kind: Default: job. One of
2814
+
2815
+ - local: execute a local python or shell script
2816
+ - job: insert the code into a Kubernetes pod and execute it
2817
+ - nuclio: insert the code into a real-time serverless nuclio function
2818
+ - serving: insert code into orchestrated nuclio function(s) forming a DAG
2819
+ - dask: run the specified python code / script as Dask Distributed job
2820
+ - mpijob: run distributed Horovod jobs over the MPI job operator
2821
+ - spark: run distributed Spark job using Spark Kubernetes Operator
2822
+ - remote-spark: run distributed Spark job on remote Spark service
2823
+ - databricks: run code on Databricks cluster (python scripts, Spark etc.)
2824
+ - application: run a long living application (e.g. a web server, UI, etc.)
2825
+ - handler: execute a python handler (used automatically in notebooks or for debug)
2826
+
2766
2827
  :param image: Docker image to be used, can also be specified in the function object/yaml
2767
2828
  :param handler: Default function handler to invoke (can only be set with .py/.ipynb files)
2768
2829
  :param with_repo: Add (clone) the current repo to the build source - use when the function code is in
@@ -2957,20 +3018,6 @@ class MlrunProject(ModelObj):
2957
3018
 
2958
3019
  self.spec.set_function(name, function_object, func)
2959
3020
 
2960
- # TODO: Remove this in 1.11.0
2961
- @deprecated.deprecated(
2962
- version="1.8.0",
2963
- reason="'remove_function' is deprecated and will be removed in 1.11.0. "
2964
- "Please use `delete_function` instead.",
2965
- category=FutureWarning,
2966
- )
2967
- def remove_function(self, name):
2968
- """remove the specified function from the project
2969
-
2970
- :param name: name of the function (under the project)
2971
- """
2972
- self.spec.remove_function(name)
2973
-
2974
3021
  def delete_function(self, name, delete_from_db=False):
2975
3022
  """deletes the specified function from the project
2976
3023
 
@@ -3360,7 +3407,12 @@ class MlrunProject(ModelObj):
3360
3407
  self._initialized = True
3361
3408
  return self.spec._function_objects
3362
3409
 
3363
- def with_secrets(self, kind, source, prefix=""):
3410
+ def with_secrets(
3411
+ self,
3412
+ kind,
3413
+ source,
3414
+ prefix="",
3415
+ ):
3364
3416
  """register a secrets source (file, env or dict)
3365
3417
 
3366
3418
  read secrets from a source provider to be used in workflows, example::
@@ -3382,12 +3434,19 @@ class MlrunProject(ModelObj):
3382
3434
 
3383
3435
  This will enable access to all secrets in vault registered to the current project.
3384
3436
 
3385
- :param kind: secret type (file, inline, env, vault)
3437
+ :param kind: secret type (file, inline, env, vault, azure_vault)
3386
3438
  :param source: secret data or link (see example)
3387
3439
  :param prefix: add a prefix to the keys in this source
3388
3440
 
3389
3441
  :returns: project object
3390
3442
  """
3443
+ # Block using mlrun-auth-secrets.* via azure_vault's k8s_secret param (client-side only)
3444
+ if kind == "azure_vault" and isinstance(source, dict):
3445
+ candidate_secret_name = (source.get("k8s_secret") or "").strip()
3446
+ if candidate_secret_name:
3447
+ mlrun.common.secrets.validate_not_forbidden_secret(
3448
+ candidate_secret_name
3449
+ )
3391
3450
 
3392
3451
  if kind == "vault" and isinstance(source, list):
3393
3452
  source = {"project": self.metadata.name, "secrets": source}
@@ -3414,7 +3473,7 @@ class MlrunProject(ModelObj):
3414
3473
  when using a secrets file it should have lines in the form KEY=VALUE, comment line start with "#"
3415
3474
  V3IO paths/credentials and MLrun service API address are dropped from the secrets
3416
3475
 
3417
- example secrets file:
3476
+ Example secrets file:
3418
3477
 
3419
3478
  .. code-block:: shell
3420
3479
 
@@ -3765,30 +3824,31 @@ class MlrunProject(ModelObj):
3765
3824
  Please note that you have to set the credentials before deploying any model monitoring application
3766
3825
  or a tracked serving function.
3767
3826
 
3768
- For example, the full flow for enabling model monitoring infrastructure with **TDEngine** and **Kafka**, is:
3827
+ For example, the full flow for enabling model monitoring infrastructure with **TimescaleDB** and **Kafka**, is:
3769
3828
 
3770
3829
  .. code-block:: python
3771
3830
 
3772
3831
  import mlrun
3773
3832
  from mlrun.datastore.datastore_profile import (
3774
- DatastoreProfileKafkaSource,
3775
- DatastoreProfileTDEngine,
3833
+ DatastoreProfileKafkaStream,
3834
+ DatastoreProfilePostgreSQL,
3776
3835
  )
3777
3836
 
3778
3837
  project = mlrun.get_or_create_project("mm-infra-setup")
3779
3838
 
3780
3839
  # Create and register TSDB profile
3781
- tsdb_profile = DatastoreProfileTDEngine(
3782
- name="my-tdengine",
3783
- host="<tdengine-server-ip-address>",
3784
- port=6041,
3785
- user="username",
3786
- password="<tdengine-password>",
3840
+ tsdb_profile = DatastoreProfilePostgreSQL(
3841
+ name="my-timescaledb",
3842
+ host="<timescaledb-server-ip-address>",
3843
+ port=5432,
3844
+ user="postgres",
3845
+ password="<timescaledb-password>",
3846
+ database="mlrun",
3787
3847
  )
3788
3848
  project.register_datastore_profile(tsdb_profile)
3789
3849
 
3790
3850
  # Create and register stream profile
3791
- stream_profile = DatastoreProfileKafkaSource(
3851
+ stream_profile = DatastoreProfileKafkaStream(
3792
3852
  name="my-kafka",
3793
3853
  brokers=["<kafka-broker-ip-address>:9094"],
3794
3854
  topics=[], # Keep the topics list empty
@@ -3830,9 +3890,9 @@ class MlrunProject(ModelObj):
3830
3890
 
3831
3891
  .. code-block:: python
3832
3892
 
3833
- from mlrun.datastore.datastore_profile import DatastoreProfileKafkaSource
3893
+ from mlrun.datastore.datastore_profile import DatastoreProfileKafkaStream
3834
3894
 
3835
- stream_profile = DatastoreProfileKafkaSource(
3895
+ stream_profile = DatastoreProfileKafkaStream(
3836
3896
  name="confluent-kafka",
3837
3897
  brokers=["<server-domain-start>.confluent.cloud:9092"],
3838
3898
  topics=[],
@@ -3854,14 +3914,14 @@ class MlrunProject(ModelObj):
3854
3914
  monitoring. The supported profiles are:
3855
3915
 
3856
3916
  * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
3857
- * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileTDEngine`
3917
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfilePostgreSQL`
3858
3918
 
3859
3919
  You need to register one of them, and pass the profile's name.
3860
3920
  :param stream_profile_name: The datastore profile name of the stream to be used in model monitoring.
3861
3921
  The supported profiles are:
3862
3922
 
3863
3923
  * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
3864
- * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaSource`
3924
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaStream`
3865
3925
 
3866
3926
  You need to register one of them, and pass the profile's name.
3867
3927
  :param replace_creds: If ``True`` - override the existing credentials.
@@ -3901,7 +3961,9 @@ class MlrunProject(ModelObj):
3901
3961
  start: Optional[datetime.datetime] = None,
3902
3962
  end: Optional[datetime.datetime] = None,
3903
3963
  top_level: bool = False,
3904
- mode: Optional[mlrun.common.schemas.EndpointMode] = None,
3964
+ modes: Optional[
3965
+ Union[mm_constants.EndpointMode, list[mm_constants.EndpointMode]]
3966
+ ] = None,
3905
3967
  uids: Optional[list[str]] = None,
3906
3968
  latest_only: bool = False,
3907
3969
  tsdb_metrics: bool = False,
@@ -3917,7 +3979,7 @@ class MlrunProject(ModelObj):
3917
3979
  5) function_tag
3918
3980
  6) labels
3919
3981
  7) top level
3920
- 8) mode
3982
+ 8) modes
3921
3983
  9) uids
3922
3984
  10) start and end time, corresponding to the `created` field.
3923
3985
  By default, when no filters are applied, all available endpoints for the given project will be listed.
@@ -3939,8 +4001,8 @@ class MlrunProject(ModelObj):
3939
4001
  :param start: The start time to filter by.Corresponding to the `created` field.
3940
4002
  :param end: The end time to filter by. Corresponding to the `created` field.
3941
4003
  :param top_level: If true will return only routers and endpoint that are NOT children of any router.
3942
- :param mode: Specifies the mode of the model endpoint. Can be "real-time", "batch", or both if set
3943
- to None.
4004
+ :param modes: Specifies the mode of the model endpoint. Can be "real-time" (0), "batch" (1),
4005
+ "batch_legacy" (2). If set to None, all are included.
3944
4006
  :param uids: If passed will return a list `ModelEndpoint` object with uid in uids.
3945
4007
  :param tsdb_metrics: When True, the time series metrics will be added to the output
3946
4008
  of the resulting.
@@ -3962,7 +4024,7 @@ class MlrunProject(ModelObj):
3962
4024
  start=start,
3963
4025
  end=end,
3964
4026
  top_level=top_level,
3965
- mode=mode,
4027
+ modes=modes,
3966
4028
  uids=uids,
3967
4029
  latest_only=latest_only,
3968
4030
  tsdb_metrics=tsdb_metrics,
@@ -3998,7 +4060,7 @@ class MlrunProject(ModelObj):
3998
4060
  ) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
3999
4061
  """Run a local or remote task as part of a local/kubeflow pipeline
4000
4062
 
4001
- example (use with project)::
4063
+ Example (use with project)::
4002
4064
 
4003
4065
  # create a project with two functions (local and from hub)
4004
4066
  project = mlrun.new_project(project_name, "./proj")
@@ -4057,7 +4119,12 @@ class MlrunProject(ModelObj):
4057
4119
  This ensures latest code changes are executed. This argument must be used in
4058
4120
  conjunction with the local=True argument.
4059
4121
  :param output_path: path to store artifacts, when running in a workflow this will be set automatically
4060
- :param retry: Retry configuration for the run, can be a dict or an instance of mlrun.model.Retry.
4122
+ :param retry: Retry configuration for the run, can be a dict or an instance of
4123
+ :py:class:`~mlrun.model.Retry`.
4124
+ The `count` field in the `Retry` object specifies the number of retry attempts.
4125
+ If `count=0`, the run will not be retried.
4126
+ The `backoff` field specifies the retry backoff strategy between retry attempts.
4127
+ If not provided, the default backoff delay is 30 seconds.
4061
4128
  :return: MLRun RunObject or PipelineNodeWrapper
4062
4129
  """
4063
4130
  if artifact_path:
@@ -4397,7 +4464,6 @@ class MlrunProject(ModelObj):
4397
4464
  kind: Optional[str] = None,
4398
4465
  category: typing.Union[str, mlrun.common.schemas.ArtifactCategories] = None,
4399
4466
  tree: Optional[str] = None,
4400
- limit: Optional[int] = None,
4401
4467
  format_: Optional[
4402
4468
  mlrun.common.formatters.ArtifactFormat
4403
4469
  ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -4447,7 +4513,6 @@ class MlrunProject(ModelObj):
4447
4513
  :param kind: Return artifacts of the requested kind.
4448
4514
  :param category: Return artifacts of the requested category.
4449
4515
  :param tree: Return artifacts of the requested tree.
4450
- :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
4451
4516
  :param format_: The format in which to return the artifacts. Default is 'full'.
4452
4517
  :param partition_by: Field to group results by. When `partition_by` is specified, the `partition_sort_by`
4453
4518
  parameter must be provided as well.
@@ -4459,13 +4524,6 @@ class MlrunProject(ModelObj):
4459
4524
  """
4460
4525
  db = mlrun.db.get_run_db(secrets=self._secrets)
4461
4526
 
4462
- if limit:
4463
- # TODO: Remove this in 1.11.0
4464
- warnings.warn(
4465
- "'limit' is deprecated and will be removed in 1.11.0. Use 'page' and 'page_size' instead.",
4466
- FutureWarning,
4467
- )
4468
-
4469
4527
  return db.list_artifacts(
4470
4528
  name,
4471
4529
  self.metadata.name,
@@ -4479,7 +4537,6 @@ class MlrunProject(ModelObj):
4479
4537
  category=category,
4480
4538
  tree=tree,
4481
4539
  format_=format_,
4482
- limit=limit,
4483
4540
  partition_by=partition_by,
4484
4541
  rows_per_partition=rows_per_partition,
4485
4542
  partition_sort_by=partition_sort_by,
@@ -4560,7 +4617,6 @@ class MlrunProject(ModelObj):
4560
4617
  iter: Optional[int] = None,
4561
4618
  best_iteration: bool = False,
4562
4619
  tree: Optional[str] = None,
4563
- limit: Optional[int] = None,
4564
4620
  format_: Optional[
4565
4621
  mlrun.common.formatters.ArtifactFormat
4566
4622
  ] = mlrun.common.formatters.ArtifactFormat.full,
@@ -4594,7 +4650,6 @@ class MlrunProject(ModelObj):
4594
4650
  artifacts generated from a hyper-param run. If only a single iteration exists, will return the artifact
4595
4651
  from that iteration. If using ``best_iter``, the ``iter`` parameter must not be used.
4596
4652
  :param tree: Return artifacts of the requested tree.
4597
- :param limit: Deprecated - Maximum number of artifacts to return (will be removed in 1.11.0).
4598
4653
  :param format_: The format in which to return the artifacts. Default is 'full'.
4599
4654
  """
4600
4655
  db = mlrun.db.get_run_db(secrets=self._secrets)
@@ -4609,7 +4664,6 @@ class MlrunProject(ModelObj):
4609
4664
  best_iteration=best_iteration,
4610
4665
  kind=mlrun.artifacts.model.ModelArtifact.kind,
4611
4666
  tree=tree,
4612
- limit=limit,
4613
4667
  format_=format_,
4614
4668
  ).to_objects()
4615
4669
 
@@ -4832,7 +4886,7 @@ class MlrunProject(ModelObj):
4832
4886
  ):
4833
4887
  """Retrieve a list of functions, filtered by specific criteria.
4834
4888
 
4835
- example::
4889
+ Example::
4836
4890
 
4837
4891
  functions = project.list_functions(tag="latest")
4838
4892
 
@@ -4970,21 +5024,27 @@ class MlrunProject(ModelObj):
4970
5024
  include_stats: bool = False,
4971
5025
  include_infra: bool = True,
4972
5026
  ) -> list[mlrun.common.schemas.model_monitoring.FunctionSummary]:
4973
- """Get monitoring function summaries for the specified project.
4974
- :param start: Start time for filtering the results (optional).
4975
- :param end: End time for filtering the results (optional).
5027
+ """
5028
+ Get monitoring function summaries for the specified project.
5029
+
5030
+ :param start: The start time of the monitoring applications’ statistics.
5031
+ If not defined, the default is 24 hours ago.
5032
+ Required timezone, applicable only when `include_stats` is set to True.
5033
+ :param end: The end time of the monitoring applications’ statistics.
5034
+ If not defined, the default is now.
5035
+ Required timezone, applicable only when `include_stats` is set to True.
4976
5036
  :param names: List of function names to filter by (optional).
4977
5037
  :param labels: Labels to filter by (optional).
4978
5038
  :param include_stats: Whether to include statistics in the response (default is False).
4979
- :param include_infra: whether to include model monitoring infrastructure functions (default is True).
5039
+ :param include_infra: Whether to include model monitoring infrastructure functions (default is True).
5040
+
4980
5041
  :return: A list of FunctionSummary objects containing information about the monitoring functions.
4981
5042
  """
4982
5043
 
4983
- if start is not None and end is not None:
4984
- if start.tzinfo is None or end.tzinfo is None:
4985
- raise mlrun.errors.MLRunInvalidArgumentTypeError(
4986
- "Custom start and end times must contain the timezone."
4987
- )
5044
+ if (start and start.tzinfo is None) or (end and end.tzinfo is None):
5045
+ raise mlrun.errors.MLRunInvalidArgumentError(
5046
+ "Custom start and end times must contain the timezone."
5047
+ )
4988
5048
 
4989
5049
  db = mlrun.db.get_run_db(secrets=self._secrets)
4990
5050
  return db.get_monitoring_function_summaries(
@@ -5004,10 +5064,14 @@ class MlrunProject(ModelObj):
5004
5064
  end: Optional[datetime.datetime] = None,
5005
5065
  include_latest_metrics: bool = False,
5006
5066
  ) -> mlrun.common.schemas.model_monitoring.FunctionSummary:
5007
- """Get a monitoring function summary for the specified project and function name.
5008
- :param name: Name of the monitoring function to retrieve the summary for.
5009
- :param start: Start time for filtering the results (optional).
5010
- :param end: End time for filtering the results (optional).
5067
+ """
5068
+ Get a monitoring function summary for the specified project and function name.
5069
+
5070
+ :param name: Name of the monitoring function to retrieve the summary for.
5071
+ :param start: The start time of the monitoring applications’ statistics.
5072
+ If not defined, the default is 24 hours ago. Required timezone.
5073
+ :param end: The end time of the monitoring applications’ statistics.
5074
+ If not defined, the default is now. Required timezone.
5011
5075
  :param include_latest_metrics: Whether to include the latest metrics in the response (default is False).
5012
5076
 
5013
5077
  :return: A FunctionSummary object containing information about the monitoring function.