mlrun 1.10.0rc16__py3-none-any.whl → 1.10.1rc4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. See the package registry page for more details.

Files changed (101)
  1. mlrun/__init__.py +22 -2
  2. mlrun/artifacts/document.py +6 -1
  3. mlrun/artifacts/llm_prompt.py +21 -15
  4. mlrun/artifacts/model.py +3 -3
  5. mlrun/common/constants.py +9 -0
  6. mlrun/common/formatters/artifact.py +1 -0
  7. mlrun/common/model_monitoring/helpers.py +86 -0
  8. mlrun/common/schemas/__init__.py +2 -0
  9. mlrun/common/schemas/auth.py +2 -0
  10. mlrun/common/schemas/function.py +10 -0
  11. mlrun/common/schemas/hub.py +30 -18
  12. mlrun/common/schemas/model_monitoring/__init__.py +2 -0
  13. mlrun/common/schemas/model_monitoring/constants.py +30 -6
  14. mlrun/common/schemas/model_monitoring/functions.py +13 -4
  15. mlrun/common/schemas/model_monitoring/model_endpoints.py +11 -0
  16. mlrun/common/schemas/pipeline.py +1 -1
  17. mlrun/common/schemas/serving.py +3 -0
  18. mlrun/common/schemas/workflow.py +1 -0
  19. mlrun/common/secrets.py +22 -1
  20. mlrun/config.py +34 -21
  21. mlrun/datastore/__init__.py +11 -3
  22. mlrun/datastore/azure_blob.py +162 -47
  23. mlrun/datastore/base.py +265 -7
  24. mlrun/datastore/datastore.py +10 -5
  25. mlrun/datastore/datastore_profile.py +61 -5
  26. mlrun/datastore/model_provider/huggingface_provider.py +367 -0
  27. mlrun/datastore/model_provider/mock_model_provider.py +87 -0
  28. mlrun/datastore/model_provider/model_provider.py +211 -74
  29. mlrun/datastore/model_provider/openai_provider.py +243 -71
  30. mlrun/datastore/s3.py +24 -2
  31. mlrun/datastore/store_resources.py +4 -4
  32. mlrun/datastore/storeytargets.py +2 -3
  33. mlrun/datastore/utils.py +15 -3
  34. mlrun/db/base.py +27 -19
  35. mlrun/db/httpdb.py +57 -48
  36. mlrun/db/nopdb.py +25 -10
  37. mlrun/execution.py +55 -13
  38. mlrun/hub/__init__.py +15 -0
  39. mlrun/hub/module.py +181 -0
  40. mlrun/k8s_utils.py +105 -16
  41. mlrun/launcher/base.py +13 -6
  42. mlrun/launcher/local.py +2 -0
  43. mlrun/model.py +9 -3
  44. mlrun/model_monitoring/api.py +66 -27
  45. mlrun/model_monitoring/applications/__init__.py +1 -1
  46. mlrun/model_monitoring/applications/base.py +388 -138
  47. mlrun/model_monitoring/applications/context.py +2 -4
  48. mlrun/model_monitoring/applications/results.py +4 -7
  49. mlrun/model_monitoring/controller.py +239 -101
  50. mlrun/model_monitoring/db/_schedules.py +36 -13
  51. mlrun/model_monitoring/db/_stats.py +4 -3
  52. mlrun/model_monitoring/db/tsdb/base.py +29 -9
  53. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +4 -5
  54. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +154 -50
  55. mlrun/model_monitoring/db/tsdb/tdengine/writer_graph_steps.py +51 -0
  56. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +17 -4
  57. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +245 -51
  58. mlrun/model_monitoring/helpers.py +28 -5
  59. mlrun/model_monitoring/stream_processing.py +45 -14
  60. mlrun/model_monitoring/writer.py +220 -1
  61. mlrun/platforms/__init__.py +3 -2
  62. mlrun/platforms/iguazio.py +7 -3
  63. mlrun/projects/operations.py +16 -11
  64. mlrun/projects/pipelines.py +2 -2
  65. mlrun/projects/project.py +157 -69
  66. mlrun/run.py +97 -20
  67. mlrun/runtimes/__init__.py +18 -0
  68. mlrun/runtimes/base.py +14 -6
  69. mlrun/runtimes/daskjob.py +1 -0
  70. mlrun/runtimes/local.py +5 -2
  71. mlrun/runtimes/mounts.py +20 -2
  72. mlrun/runtimes/nuclio/__init__.py +1 -0
  73. mlrun/runtimes/nuclio/application/application.py +147 -17
  74. mlrun/runtimes/nuclio/function.py +72 -27
  75. mlrun/runtimes/nuclio/serving.py +102 -20
  76. mlrun/runtimes/pod.py +213 -21
  77. mlrun/runtimes/utils.py +49 -9
  78. mlrun/secrets.py +54 -13
  79. mlrun/serving/remote.py +79 -6
  80. mlrun/serving/routers.py +23 -41
  81. mlrun/serving/server.py +230 -40
  82. mlrun/serving/states.py +605 -232
  83. mlrun/serving/steps.py +62 -0
  84. mlrun/serving/system_steps.py +136 -81
  85. mlrun/serving/v2_serving.py +9 -10
  86. mlrun/utils/helpers.py +215 -83
  87. mlrun/utils/logger.py +3 -1
  88. mlrun/utils/notifications/notification/base.py +18 -0
  89. mlrun/utils/notifications/notification/git.py +2 -4
  90. mlrun/utils/notifications/notification/mail.py +38 -15
  91. mlrun/utils/notifications/notification/slack.py +2 -4
  92. mlrun/utils/notifications/notification/webhook.py +2 -5
  93. mlrun/utils/notifications/notification_pusher.py +1 -1
  94. mlrun/utils/version/version.json +2 -2
  95. {mlrun-1.10.0rc16.dist-info → mlrun-1.10.1rc4.dist-info}/METADATA +51 -50
  96. {mlrun-1.10.0rc16.dist-info → mlrun-1.10.1rc4.dist-info}/RECORD +100 -95
  97. mlrun/api/schemas/__init__.py +0 -259
  98. {mlrun-1.10.0rc16.dist-info → mlrun-1.10.1rc4.dist-info}/WHEEL +0 -0
  99. {mlrun-1.10.0rc16.dist-info → mlrun-1.10.1rc4.dist-info}/entry_points.txt +0 -0
  100. {mlrun-1.10.0rc16.dist-info → mlrun-1.10.1rc4.dist-info}/licenses/LICENSE +0 -0
  101. {mlrun-1.10.0rc16.dist-info → mlrun-1.10.1rc4.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -45,6 +45,7 @@ import mlrun.common.runtimes.constants
45
45
  import mlrun.common.schemas.alert
46
46
  import mlrun.common.schemas.artifact
47
47
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
48
+ import mlrun.common.secrets
48
49
  import mlrun.datastore.datastore_profile
49
50
  import mlrun.db
50
51
  import mlrun.errors
@@ -166,7 +167,7 @@ def new_project(
166
167
  in the project root dir, it will be executed upon project creation or loading.
167
168
 
168
169
 
169
- example::
170
+ Example::
170
171
 
171
172
  # create a project with local and hub functions, a workflow, and an artifact
172
173
  project = mlrun.new_project(
@@ -183,7 +184,7 @@ def new_project(
183
184
  # run the "main" workflow (watch=True to wait for run completion)
184
185
  project.run("main", watch=True)
185
186
 
186
- example (load from template)::
187
+ Example (load from template)::
187
188
 
188
189
  # create a new project from a zip template (can also use yaml/git templates)
189
190
  # initialize a local git, and register the git remote path
@@ -197,7 +198,7 @@ def new_project(
197
198
  project.run("main", watch=True)
198
199
 
199
200
 
200
- example using project_setup.py to init the project objects::
201
+ Example using project_setup.py to init the project objects::
201
202
 
202
203
  def setup(project):
203
204
  project.set_function(
@@ -1281,7 +1282,7 @@ class MlrunProject(ModelObj):
1281
1282
  ) -> str:
1282
1283
  """return the project artifact uri (store://..) from the artifact key
1283
1284
 
1284
- example::
1285
+ Example::
1285
1286
 
1286
1287
  uri = project.get_artifact_uri("my_model", category="model", tag="prod", iter=0)
1287
1288
 
@@ -1459,7 +1460,7 @@ class MlrunProject(ModelObj):
1459
1460
  ):
1460
1461
  """add/set an artifact in the project spec (will be registered on load)
1461
1462
 
1462
- example::
1463
+ Example::
1463
1464
 
1464
1465
  # register a simple file artifact
1465
1466
  project.set_artifact("data", target_path=data_url)
@@ -1610,7 +1611,7 @@ class MlrunProject(ModelObj):
1610
1611
 
1611
1612
  If the artifact already exists with the same key and tag, it will be overwritten.
1612
1613
 
1613
- example::
1614
+ Example::
1614
1615
 
1615
1616
  project.log_artifact(
1616
1617
  "some-data",
@@ -1714,7 +1715,7 @@ class MlrunProject(ModelObj):
1714
1715
 
1715
1716
  If the dataset already exists with the same key and tag, it will be overwritten.
1716
1717
 
1717
- example::
1718
+ Example::
1718
1719
 
1719
1720
  raw_data = {
1720
1721
  "first_name": ["Jason", "Molly", "Tina", "Jake", "Amy"],
@@ -1801,7 +1802,7 @@ class MlrunProject(ModelObj):
1801
1802
 
1802
1803
  If the model already exists with the same key and tag, it will be overwritten.
1803
1804
 
1804
- example::
1805
+ Example::
1805
1806
 
1806
1807
  project.log_model(
1807
1808
  "model",
@@ -1888,7 +1889,7 @@ class MlrunProject(ModelObj):
1888
1889
  prompt_path: Optional[str] = None,
1889
1890
  prompt_legend: Optional[dict] = None,
1890
1891
  model_artifact: Union[ModelArtifact, str] = None,
1891
- model_configuration: Optional[dict] = None,
1892
+ invocation_config: Optional[dict] = None,
1892
1893
  description: Optional[str] = None,
1893
1894
  target_path: Optional[str] = None,
1894
1895
  artifact_path: Optional[str] = None,
@@ -1908,13 +1909,51 @@ class MlrunProject(ModelObj):
1908
1909
 
1909
1910
  Examples::
1910
1911
 
1912
+ # Log directly with an inline prompt template
1913
+ project.log_llm_prompt(
1914
+ key="customer_support_prompt",
1915
+ prompt_template=[
1916
+ {
1917
+ "role": "system",
1918
+ "content": "You are a helpful customer support assistant.",
1919
+ },
1920
+ {
1921
+ "role": "user",
1922
+ "content": "The customer reports: {issue_description}",
1923
+ },
1924
+ ],
1925
+ prompt_legend={
1926
+ "issue_description": {
1927
+ "field": "user_issue",
1928
+ "description": "Detailed description of the customer's issue",
1929
+ },
1930
+ "solution": {
1931
+ "field": "proposed_solution",
1932
+ "description": "Suggested fix for the customer's issue",
1933
+ },
1934
+ },
1935
+ model_artifact=model,
1936
+ invocation_config={"temperature": 0.5, "max_tokens": 200},
1937
+ description="Prompt for handling customer support queries",
1938
+ tag="support-v1",
1939
+ labels={"domain": "support"},
1940
+ )
1941
+
1911
1942
  # Log a prompt from file
1912
1943
  project.log_llm_prompt(
1913
- key="qa-prompt",
1914
- prompt_path="prompts/qa_template.txt",
1915
- prompt_legend={"question": "user_question"},
1944
+ key="qa_prompt",
1945
+ prompt_path="prompts/template.json",
1946
+ prompt_legend={
1947
+ "question": {
1948
+ "field": "user_question",
1949
+ "description": "The actual question asked by the user",
1950
+ }
1951
+ },
1916
1952
  model_artifact=model,
1953
+ invocation_config={"temperature": 0.7, "max_tokens": 256},
1954
+ description="Q&A prompt template with user-provided question",
1917
1955
  tag="v2",
1956
+ labels={"task": "qa", "stage": "experiment"},
1918
1957
  )
1919
1958
 
1920
1959
  :param key: Unique key for the prompt artifact.
@@ -1923,18 +1962,23 @@ class MlrunProject(ModelObj):
1923
1962
  "role": "user", "content": "I need your help with {profession}"]. only "role" and "content" keys allow in any
1924
1963
  str format (upper/lower case), keys will be modified to lower case.
1925
1964
  Cannot be used with `prompt_path`.
1926
- :param prompt_path: Path to a file containing the prompt. Mutually exclusive with `prompt_string`.
1965
+ :param prompt_path: Path to a JSON file containing the prompt template.
1966
+ Cannot be used together with `prompt_template`.
1967
+ The file should define a list of dictionaries in the same format
1968
+ supported by `prompt_template`.
1927
1969
  :param prompt_legend: A dictionary where each key is a placeholder in the prompt (e.g., ``{user_name}``)
1928
1970
  and the value is a dictionary holding two keys, "field", "description". "field" points to the field in
1929
1971
  the event where the value of the place-holder inside the event, if None or not exist will be replaced
1930
1972
  with the placeholder name. "description" points to an explanation of what that placeholder represents.
1931
1973
  Useful for documenting and clarifying dynamic parts of the prompt.
1932
1974
  :param model_artifact: Reference to the parent model (either `ModelArtifact` or model URI string).
1933
- :param model_configuration: Configuration dictionary for model generation parameters
1975
+ :param invocation_config: Configuration dictionary for model generation parameters
1934
1976
  (e.g., temperature, max tokens).
1935
- :param description: Optional description of the prompt.
1936
- :param target_path: Optional local target path for saving prompt content.
1937
- :param artifact_path: Storage path for the logged artifact.
1977
+ :param description: Optional description of the prompt.
1978
+ :param target_path: Absolute target path (instead of using artifact_path + local_path)
1979
+ :param artifact_path: Target artifact path (when not using the default)
1980
+ To define a subpath under the default location use:
1981
+ `artifact_path=context.artifact_subpath('data')`
1938
1982
  :param tag: Version tag for the artifact (e.g., "v1", "latest").
1939
1983
  :param labels: Labels to tag the artifact for filtering and organization.
1940
1984
  :param upload: Whether to upload the artifact to a remote datastore. Defaults to True.
@@ -1955,7 +1999,7 @@ class MlrunProject(ModelObj):
1955
1999
  prompt_path=prompt_path,
1956
2000
  prompt_legend=prompt_legend,
1957
2001
  model_artifact=model_artifact,
1958
- model_configuration=model_configuration,
2002
+ invocation_config=invocation_config,
1959
2003
  target_path=target_path,
1960
2004
  description=description,
1961
2005
  **kwargs,
@@ -2000,11 +2044,12 @@ class MlrunProject(ModelObj):
2000
2044
  This wrapper provides both access to the original vector
2001
2045
  store's capabilities and additional MLRun functionality.
2002
2046
 
2003
- Example:
2004
- >>> vector_store = Chroma(embedding_function=embeddings)
2005
- >>> collection = project.get_vector_store_collection(
2006
- ... vector_store, collection_name="my_collection"
2007
- ... )
2047
+ Example::
2048
+
2049
+ vector_store = Chroma(embedding_function=embeddings)
2050
+ collection = project.get_vector_store_collection(
2051
+ vector_store, collection_name="my_collection"
2052
+ )
2008
2053
  """
2009
2054
  return VectorStoreCollection(
2010
2055
  self,
@@ -2055,16 +2100,17 @@ class MlrunProject(ModelObj):
2055
2100
  :param kwargs: Additional keyword arguments
2056
2101
  :return: DocumentArtifact object
2057
2102
 
2058
- Example:
2059
- >>> # Log a PDF document with custom loader
2060
- >>> project.log_document(
2061
- ... local_path="path/to/doc.pdf",
2062
- ... document_loader=DocumentLoaderSpec(
2063
- ... loader_class_name="langchain_community.document_loaders.PDFLoader",
2064
- ... src_name="file_path",
2065
- ... kwargs={"extract_images": True},
2066
- ... ),
2067
- ... )
2103
+ Example::
2104
+
2105
+ # Log a PDF document with custom loader
2106
+ project.log_document(
2107
+ local_path="path/to/doc.pdf",
2108
+ document_loader=DocumentLoaderSpec(
2109
+ loader_class_name="langchain_community.document_loaders.PDFLoader",
2110
+ src_name="file_path",
2111
+ kwargs={"extract_images": True},
2112
+ ),
2113
+ )
2068
2114
 
2069
2115
  """
2070
2116
  if not key and not local_path and not target_path:
@@ -2343,8 +2389,9 @@ class MlrunProject(ModelObj):
2343
2389
  handler: Optional[str] = None,
2344
2390
  with_repo: Optional[bool] = None,
2345
2391
  tag: Optional[str] = None,
2346
- requirements: Optional[typing.Union[str, list[str]]] = None,
2392
+ requirements: Optional[list[str]] = None,
2347
2393
  requirements_file: str = "",
2394
+ local_path: Optional[str] = None,
2348
2395
  **application_kwargs,
2349
2396
  ) -> mlrun.runtimes.RemoteRuntime:
2350
2397
  """
@@ -2359,7 +2406,8 @@ class MlrunProject(ModelObj):
2359
2406
  )
2360
2407
 
2361
2408
  :param func: Remote function object or spec/code URL. :code:`None` refers to the current
2362
- notebook.
2409
+ notebook. May also be a hub URL of a module of kind model-monitoring-app in the
2410
+ format: hub://[{source}/]{name}[:{tag}].
2363
2411
  :param name: Name of the function (under the project), can be specified with a tag to support
2364
2412
  versions (e.g. myfunc:v1).
2365
2413
  :param image: Docker image to be used, can also be specified in
@@ -2374,6 +2422,8 @@ class MlrunProject(ModelObj):
2374
2422
  :param application_class: Name or an Instance of a class that implements the monitoring application.
2375
2423
  :param application_kwargs: Additional keyword arguments to be passed to the
2376
2424
  monitoring application's constructor.
2425
+ :param local_path: Path to a local directory to save the downloaded monitoring-app code files in,
2426
+ in case 'func' is a hub URL (defaults to current working directory).
2377
2427
  :returns: The model monitoring remote function object.
2378
2428
  """
2379
2429
  (
@@ -2390,6 +2440,7 @@ class MlrunProject(ModelObj):
2390
2440
  tag,
2391
2441
  requirements,
2392
2442
  requirements_file,
2443
+ local_path,
2393
2444
  **application_kwargs,
2394
2445
  )
2395
2446
  # save to project spec
@@ -2468,8 +2519,9 @@ class MlrunProject(ModelObj):
2468
2519
  handler: typing.Optional[str] = None,
2469
2520
  with_repo: typing.Optional[bool] = None,
2470
2521
  tag: typing.Optional[str] = None,
2471
- requirements: typing.Union[str, list[str], None] = None,
2522
+ requirements: typing.Union[list[str], None] = None,
2472
2523
  requirements_file: str = "",
2524
+ local_path: typing.Optional[str] = None,
2473
2525
  **application_kwargs,
2474
2526
  ) -> tuple[str, mlrun.runtimes.RemoteRuntime, dict]:
2475
2527
  import mlrun.model_monitoring.api
@@ -2486,6 +2538,7 @@ class MlrunProject(ModelObj):
2486
2538
  tag=tag,
2487
2539
  requirements=requirements,
2488
2540
  requirements_file=requirements_file,
2541
+ local_path=local_path,
2489
2542
  **application_kwargs,
2490
2543
  )
2491
2544
  elif isinstance(func, str) and isinstance(handler, str):
@@ -2531,7 +2584,7 @@ class MlrunProject(ModelObj):
2531
2584
  *,
2532
2585
  deploy_histogram_data_drift_app: bool = True,
2533
2586
  wait_for_deployment: bool = False,
2534
- fetch_credentials_from_sys_config: bool = False,
2587
+ fetch_credentials_from_sys_config: bool = False, # deprecated
2535
2588
  ) -> None:
2536
2589
  """
2537
2590
  Deploy model monitoring application controller, writer and stream functions.
@@ -2566,14 +2619,20 @@ class MlrunProject(ModelObj):
2566
2619
  :param wait_for_deployment: If true, return only after the deployment is done on the backend.
2567
2620
  Otherwise, deploy the model monitoring infrastructure on the
2568
2621
  background, including the histogram data drift app if selected.
2569
- :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
2622
+ :param fetch_credentials_from_sys_config: Deprecated. If true, fetch the credentials from the project
2623
+ configuration.
2570
2624
  """
2625
+ if fetch_credentials_from_sys_config:
2626
+ warnings.warn(
2627
+ "`fetch_credentials_from_sys_config` is deprecated in 1.10.0 and will be removed in 1.12.0.",
2628
+ # TODO: Remove this in 1.12.0
2629
+ FutureWarning,
2630
+ )
2571
2631
  if base_period < 10:
2572
2632
  logger.warn(
2573
2633
  "enable_model_monitoring: 'base_period' < 10 minutes is not supported in production environments",
2574
2634
  project=self.name,
2575
2635
  )
2576
-
2577
2636
  db = mlrun.db.get_run_db(secrets=self._secrets)
2578
2637
  db.enable_model_monitoring(
2579
2638
  project=self.name,
@@ -2706,16 +2765,18 @@ class MlrunProject(ModelObj):
2706
2765
  | Creating a function with non project source is done by specifying a module ``handler`` and on the
2707
2766
  returned function set the source with ``function.with_source_archive(<source>)``.
2708
2767
 
2709
- Support URL prefixes:
2768
+ Supported URL prefixes:
2710
2769
 
2711
2770
  | Object (s3://, v3io://, ..)
2712
2771
  | MLRun DB e.g. db://project/func:ver
2713
- | Functions hub/market: e.g. hub://auto-trainer:master
2772
+ | Hub/market: e.g. hub://auto-trainer:master
2714
2773
 
2715
2774
  Examples::
2716
2775
 
2717
2776
  proj.set_function(func_object)
2718
- proj.set_function("http://.../mynb.ipynb", "train")
2777
+ proj.set_function(
2778
+ "http://.../mynb.ipynb", "train", kind="job", image="mlrun/mlrun"
2779
+ )
2719
2780
  proj.set_function("./func.yaml")
2720
2781
  proj.set_function("hub://get_toy_data", "getdata")
2721
2782
 
@@ -2742,18 +2803,6 @@ class MlrunProject(ModelObj):
2742
2803
  # By providing a path to a pip requirements file
2743
2804
  proj.set_function("my.py", requirements="requirements.txt")
2744
2805
 
2745
- One of the most important parameters is 'kind', used to specify the chosen runtime. The options are:
2746
- - local: execute a local python or shell script
2747
- - job: insert the code into a Kubernetes pod and execute it
2748
- - nuclio: insert the code into a real-time serverless nuclio function
2749
- - serving: insert code into orchestrated nuclio function(s) forming a DAG
2750
- - dask: run the specified python code / script as Dask Distributed job
2751
- - mpijob: run distributed Horovod jobs over the MPI job operator
2752
- - spark: run distributed Spark job using Spark Kubernetes Operator
2753
- - remote-spark: run distributed Spark job on remote Spark service
2754
- - databricks: run code on Databricks cluster (python scripts, Spark etc.)
2755
- - application: run a long living application (e.g. a web server, UI, etc.)
2756
-
2757
2806
  Learn more about :doc:`../../concepts/functions-overview`.
2758
2807
 
2759
2808
  :param func: Function object or spec/code url, None refers to current Notebook
@@ -2761,8 +2810,20 @@ class MlrunProject(ModelObj):
2761
2810
  Versions (e.g. myfunc:v1). If the `tag` parameter is provided, the tag in the name
2762
2811
  must match the tag parameter.
2763
2812
  Specifying a tag in the name will update the project's tagged function (myfunc:v1)
2764
- :param kind: Runtime kind e.g. job, nuclio, spark, dask, mpijob
2765
- Default: job
2813
+ :param kind: Default: job. One of
2814
+
2815
+ - local: execute a local python or shell script
2816
+ - job: insert the code into a Kubernetes pod and execute it
2817
+ - nuclio: insert the code into a real-time serverless nuclio function
2818
+ - serving: insert code into orchestrated nuclio function(s) forming a DAG
2819
+ - dask: run the specified python code / script as Dask Distributed job
2820
+ - mpijob: run distributed Horovod jobs over the MPI job operator
2821
+ - spark: run distributed Spark job using Spark Kubernetes Operator
2822
+ - remote-spark: run distributed Spark job on remote Spark service
2823
+ - databricks: run code on Databricks cluster (python scripts, Spark etc.)
2824
+ - application: run a long living application (e.g. a web server, UI, etc.)
2825
+ - handler: execute a python handler (used automatically in notebooks or for debug)
2826
+
2766
2827
  :param image: Docker image to be used, can also be specified in the function object/yaml
2767
2828
  :param handler: Default function handler to invoke (can only be set with .py/.ipynb files)
2768
2829
  :param with_repo: Add (clone) the current repo to the build source - use when the function code is in
@@ -3360,7 +3421,12 @@ class MlrunProject(ModelObj):
3360
3421
  self._initialized = True
3361
3422
  return self.spec._function_objects
3362
3423
 
3363
- def with_secrets(self, kind, source, prefix=""):
3424
+ def with_secrets(
3425
+ self,
3426
+ kind,
3427
+ source,
3428
+ prefix="",
3429
+ ):
3364
3430
  """register a secrets source (file, env or dict)
3365
3431
 
3366
3432
  read secrets from a source provider to be used in workflows, example::
@@ -3382,12 +3448,19 @@ class MlrunProject(ModelObj):
3382
3448
 
3383
3449
  This will enable access to all secrets in vault registered to the current project.
3384
3450
 
3385
- :param kind: secret type (file, inline, env, vault)
3451
+ :param kind: secret type (file, inline, env, vault, azure_vault)
3386
3452
  :param source: secret data or link (see example)
3387
3453
  :param prefix: add a prefix to the keys in this source
3388
3454
 
3389
3455
  :returns: project object
3390
3456
  """
3457
+ # Block using mlrun-auth-secrets.* via azure_vault's k8s_secret param (client-side only)
3458
+ if kind == "azure_vault" and isinstance(source, dict):
3459
+ candidate_secret_name = (source.get("k8s_secret") or "").strip()
3460
+ if candidate_secret_name:
3461
+ mlrun.common.secrets.validate_not_forbidden_secret(
3462
+ candidate_secret_name
3463
+ )
3391
3464
 
3392
3465
  if kind == "vault" and isinstance(source, list):
3393
3466
  source = {"project": self.metadata.name, "secrets": source}
@@ -3414,7 +3487,7 @@ class MlrunProject(ModelObj):
3414
3487
  when using a secrets file it should have lines in the form KEY=VALUE, comment line start with "#"
3415
3488
  V3IO paths/credentials and MLrun service API address are dropped from the secrets
3416
3489
 
3417
- example secrets file:
3490
+ Example secrets file:
3418
3491
 
3419
3492
  .. code-block:: shell
3420
3493
 
@@ -3771,7 +3844,7 @@ class MlrunProject(ModelObj):
3771
3844
 
3772
3845
  import mlrun
3773
3846
  from mlrun.datastore.datastore_profile import (
3774
- DatastoreProfileKafkaSource,
3847
+ DatastoreProfileKafkaStream,
3775
3848
  DatastoreProfileTDEngine,
3776
3849
  )
3777
3850
 
@@ -3788,7 +3861,7 @@ class MlrunProject(ModelObj):
3788
3861
  project.register_datastore_profile(tsdb_profile)
3789
3862
 
3790
3863
  # Create and register stream profile
3791
- stream_profile = DatastoreProfileKafkaSource(
3864
+ stream_profile = DatastoreProfileKafkaStream(
3792
3865
  name="my-kafka",
3793
3866
  brokers=["<kafka-broker-ip-address>:9094"],
3794
3867
  topics=[], # Keep the topics list empty
@@ -3830,9 +3903,9 @@ class MlrunProject(ModelObj):
3830
3903
 
3831
3904
  .. code-block:: python
3832
3905
 
3833
- from mlrun.datastore.datastore_profile import DatastoreProfileKafkaSource
3906
+ from mlrun.datastore.datastore_profile import DatastoreProfileKafkaStream
3834
3907
 
3835
- stream_profile = DatastoreProfileKafkaSource(
3908
+ stream_profile = DatastoreProfileKafkaStream(
3836
3909
  name="confluent-kafka",
3837
3910
  brokers=["<server-domain-start>.confluent.cloud:9092"],
3838
3911
  topics=[],
@@ -3861,7 +3934,7 @@ class MlrunProject(ModelObj):
3861
3934
  The supported profiles are:
3862
3935
 
3863
3936
  * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
3864
- * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaSource`
3937
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaStream`
3865
3938
 
3866
3939
  You need to register one of them, and pass the profile's name.
3867
3940
  :param replace_creds: If ``True`` - override the existing credentials.
@@ -3901,6 +3974,9 @@ class MlrunProject(ModelObj):
3901
3974
  start: Optional[datetime.datetime] = None,
3902
3975
  end: Optional[datetime.datetime] = None,
3903
3976
  top_level: bool = False,
3977
+ modes: Optional[
3978
+ Union[mm_constants.EndpointMode, list[mm_constants.EndpointMode]]
3979
+ ] = None,
3904
3980
  uids: Optional[list[str]] = None,
3905
3981
  latest_only: bool = False,
3906
3982
  tsdb_metrics: bool = False,
@@ -3916,8 +3992,9 @@ class MlrunProject(ModelObj):
3916
3992
  5) function_tag
3917
3993
  6) labels
3918
3994
  7) top level
3919
- 8) uids
3920
- 9) start and end time, corresponding to the `created` field.
3995
+ 8) modes
3996
+ 9) uids
3997
+ 10) start and end time, corresponding to the `created` field.
3921
3998
  By default, when no filters are applied, all available endpoints for the given project will be listed.
3922
3999
 
3923
4000
  In addition, this functions provides a facade for listing endpoint related metrics. This facade is time-based
@@ -3937,6 +4014,8 @@ class MlrunProject(ModelObj):
3937
4014
  :param start: The start time to filter by. Corresponding to the `created` field.
3938
4015
  :param end: The end time to filter by. Corresponding to the `created` field.
3939
4016
  :param top_level: If true will return only routers and endpoint that are NOT children of any router.
4017
+ :param modes: Specifies the mode of the model endpoint. Can be "real-time" (0), "batch" (1),
4018
+ "batch_legacy" (2). If set to None, all are included.
3940
4019
  :param uids: If passed will return a list `ModelEndpoint` object with uid in uids.
3941
4020
  :param tsdb_metrics: When True, the time series metrics will be added to the output
3942
4021
  of the resulting model endpoints.
@@ -3958,6 +4037,7 @@ class MlrunProject(ModelObj):
3958
4037
  start=start,
3959
4038
  end=end,
3960
4039
  top_level=top_level,
4040
+ modes=modes,
3961
4041
  uids=uids,
3962
4042
  latest_only=latest_only,
3963
4043
  tsdb_metrics=tsdb_metrics,
@@ -3993,7 +4073,7 @@ class MlrunProject(ModelObj):
3993
4073
  ) -> typing.Union[mlrun.model.RunObject, PipelineNodeWrapper]:
3994
4074
  """Run a local or remote task as part of a local/kubeflow pipeline
3995
4075
 
3996
- example (use with project)::
4076
+ Example (use with project)::
3997
4077
 
3998
4078
  # create a project with two functions (local and from hub)
3999
4079
  project = mlrun.new_project(project_name, "./proj")
@@ -4052,7 +4132,12 @@ class MlrunProject(ModelObj):
4052
4132
  This ensures latest code changes are executed. This argument must be used in
4053
4133
  conjunction with the local=True argument.
4054
4134
  :param output_path: path to store artifacts, when running in a workflow this will be set automatically
4055
- :param retry: Retry configuration for the run, can be a dict or an instance of mlrun.model.Retry.
4135
+ :param retry: Retry configuration for the run, can be a dict or an instance of
4136
+ :py:class:`~mlrun.model.Retry`.
4137
+ The `count` field in the `Retry` object specifies the number of retry attempts.
4138
+ If `count=0`, the run will not be retried.
4139
+ The `backoff` field specifies the retry backoff strategy between retry attempts.
4140
+ If not provided, the default backoff delay is 30 seconds.
4056
4141
  :return: MLRun RunObject or PipelineNodeWrapper
4057
4142
  """
4058
4143
  if artifact_path:
@@ -4827,7 +4912,7 @@ class MlrunProject(ModelObj):
4827
4912
  ):
4828
4913
  """Retrieve a list of functions, filtered by specific criteria.
4829
4914
 
4830
- example::
4915
+ Example::
4831
4916
 
4832
4917
  functions = project.list_functions(tag="latest")
4833
4918
 
@@ -4966,12 +5051,14 @@ class MlrunProject(ModelObj):
4966
5051
  include_infra: bool = True,
4967
5052
  ) -> list[mlrun.common.schemas.model_monitoring.FunctionSummary]:
4968
5053
  """Get monitoring function summaries for the specified project.
5054
+
4969
5055
  :param start: Start time for filtering the results (optional).
4970
5056
  :param end: End time for filtering the results (optional).
4971
5057
  :param names: List of function names to filter by (optional).
4972
5058
  :param labels: Labels to filter by (optional).
4973
5059
  :param include_stats: Whether to include statistics in the response (default is False).
4974
5060
  :param include_infra: whether to include model monitoring infrastructure functions (default is True).
5061
+
4975
5062
  :return: A list of FunctionSummary objects containing information about the monitoring functions.
4976
5063
  """
4977
5064
 
@@ -5000,6 +5087,7 @@ class MlrunProject(ModelObj):
5000
5087
  include_latest_metrics: bool = False,
5001
5088
  ) -> mlrun.common.schemas.model_monitoring.FunctionSummary:
5002
5089
  """Get a monitoring function summary for the specified project and function name.
5090
+
5003
5091
  :param name: Name of the monitoring function to retrieve the summary for.
5004
5092
  :param start: Start time for filtering the results (optional).
5005
5093
  :param end: End time for filtering the results (optional).