mlrun 1.10.0rc13__py3-none-any.whl → 1.10.0rc42__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.



Files changed (107)
  1. mlrun/__init__.py +22 -2
  2. mlrun/artifacts/base.py +0 -31
  3. mlrun/artifacts/document.py +6 -1
  4. mlrun/artifacts/llm_prompt.py +123 -25
  5. mlrun/artifacts/manager.py +0 -5
  6. mlrun/artifacts/model.py +3 -3
  7. mlrun/common/constants.py +10 -1
  8. mlrun/common/formatters/artifact.py +1 -0
  9. mlrun/common/model_monitoring/helpers.py +86 -0
  10. mlrun/common/schemas/__init__.py +3 -0
  11. mlrun/common/schemas/auth.py +2 -0
  12. mlrun/common/schemas/function.py +10 -0
  13. mlrun/common/schemas/hub.py +30 -18
  14. mlrun/common/schemas/model_monitoring/__init__.py +3 -0
  15. mlrun/common/schemas/model_monitoring/constants.py +30 -6
  16. mlrun/common/schemas/model_monitoring/functions.py +14 -5
  17. mlrun/common/schemas/model_monitoring/model_endpoints.py +21 -0
  18. mlrun/common/schemas/pipeline.py +1 -1
  19. mlrun/common/schemas/serving.py +3 -0
  20. mlrun/common/schemas/workflow.py +3 -1
  21. mlrun/common/secrets.py +22 -1
  22. mlrun/config.py +33 -11
  23. mlrun/datastore/__init__.py +11 -3
  24. mlrun/datastore/azure_blob.py +162 -47
  25. mlrun/datastore/datastore.py +9 -4
  26. mlrun/datastore/datastore_profile.py +61 -5
  27. mlrun/datastore/model_provider/huggingface_provider.py +363 -0
  28. mlrun/datastore/model_provider/mock_model_provider.py +87 -0
  29. mlrun/datastore/model_provider/model_provider.py +230 -65
  30. mlrun/datastore/model_provider/openai_provider.py +295 -42
  31. mlrun/datastore/s3.py +24 -2
  32. mlrun/datastore/storeytargets.py +2 -3
  33. mlrun/datastore/utils.py +15 -3
  34. mlrun/db/base.py +47 -19
  35. mlrun/db/httpdb.py +120 -56
  36. mlrun/db/nopdb.py +38 -10
  37. mlrun/execution.py +70 -19
  38. mlrun/hub/__init__.py +15 -0
  39. mlrun/hub/module.py +181 -0
  40. mlrun/k8s_utils.py +105 -16
  41. mlrun/launcher/base.py +13 -6
  42. mlrun/launcher/local.py +15 -0
  43. mlrun/model.py +24 -3
  44. mlrun/model_monitoring/__init__.py +1 -0
  45. mlrun/model_monitoring/api.py +66 -27
  46. mlrun/model_monitoring/applications/__init__.py +1 -1
  47. mlrun/model_monitoring/applications/base.py +509 -117
  48. mlrun/model_monitoring/applications/context.py +2 -4
  49. mlrun/model_monitoring/applications/results.py +4 -7
  50. mlrun/model_monitoring/controller.py +239 -101
  51. mlrun/model_monitoring/db/_schedules.py +116 -33
  52. mlrun/model_monitoring/db/_stats.py +4 -3
  53. mlrun/model_monitoring/db/tsdb/base.py +100 -9
  54. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +11 -6
  55. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +191 -50
  56. mlrun/model_monitoring/db/tsdb/tdengine/writer_graph_steps.py +51 -0
  57. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +17 -4
  58. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +259 -40
  59. mlrun/model_monitoring/helpers.py +54 -9
  60. mlrun/model_monitoring/stream_processing.py +45 -14
  61. mlrun/model_monitoring/writer.py +220 -1
  62. mlrun/platforms/__init__.py +3 -2
  63. mlrun/platforms/iguazio.py +7 -3
  64. mlrun/projects/operations.py +6 -1
  65. mlrun/projects/pipelines.py +46 -26
  66. mlrun/projects/project.py +166 -58
  67. mlrun/run.py +94 -17
  68. mlrun/runtimes/__init__.py +18 -0
  69. mlrun/runtimes/base.py +14 -6
  70. mlrun/runtimes/daskjob.py +7 -0
  71. mlrun/runtimes/local.py +5 -2
  72. mlrun/runtimes/mounts.py +20 -2
  73. mlrun/runtimes/mpijob/abstract.py +6 -0
  74. mlrun/runtimes/mpijob/v1.py +6 -0
  75. mlrun/runtimes/nuclio/__init__.py +1 -0
  76. mlrun/runtimes/nuclio/application/application.py +149 -17
  77. mlrun/runtimes/nuclio/function.py +76 -27
  78. mlrun/runtimes/nuclio/serving.py +97 -15
  79. mlrun/runtimes/pod.py +234 -21
  80. mlrun/runtimes/remotesparkjob.py +6 -0
  81. mlrun/runtimes/sparkjob/spark3job.py +6 -0
  82. mlrun/runtimes/utils.py +49 -11
  83. mlrun/secrets.py +54 -13
  84. mlrun/serving/__init__.py +2 -0
  85. mlrun/serving/remote.py +79 -6
  86. mlrun/serving/routers.py +23 -41
  87. mlrun/serving/server.py +320 -80
  88. mlrun/serving/states.py +725 -157
  89. mlrun/serving/steps.py +62 -0
  90. mlrun/serving/system_steps.py +200 -119
  91. mlrun/serving/v2_serving.py +9 -10
  92. mlrun/utils/helpers.py +288 -88
  93. mlrun/utils/logger.py +3 -1
  94. mlrun/utils/notifications/notification/base.py +18 -0
  95. mlrun/utils/notifications/notification/git.py +2 -4
  96. mlrun/utils/notifications/notification/slack.py +2 -4
  97. mlrun/utils/notifications/notification/webhook.py +2 -5
  98. mlrun/utils/notifications/notification_pusher.py +1 -1
  99. mlrun/utils/retryer.py +15 -2
  100. mlrun/utils/version/version.json +2 -2
  101. {mlrun-1.10.0rc13.dist-info → mlrun-1.10.0rc42.dist-info}/METADATA +45 -51
  102. {mlrun-1.10.0rc13.dist-info → mlrun-1.10.0rc42.dist-info}/RECORD +106 -101
  103. mlrun/api/schemas/__init__.py +0 -259
  104. {mlrun-1.10.0rc13.dist-info → mlrun-1.10.0rc42.dist-info}/WHEEL +0 -0
  105. {mlrun-1.10.0rc13.dist-info → mlrun-1.10.0rc42.dist-info}/entry_points.txt +0 -0
  106. {mlrun-1.10.0rc13.dist-info → mlrun-1.10.0rc42.dist-info}/licenses/LICENSE +0 -0
  107. {mlrun-1.10.0rc13.dist-info → mlrun-1.10.0rc42.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -45,6 +45,7 @@ import mlrun.common.runtimes.constants
  import mlrun.common.schemas.alert
  import mlrun.common.schemas.artifact
  import mlrun.common.schemas.model_monitoring.constants as mm_constants
+ import mlrun.common.secrets
  import mlrun.datastore.datastore_profile
  import mlrun.db
  import mlrun.errors
@@ -1042,12 +1043,7 @@ class ProjectSpec(ModelObj):
  artifact = artifact.to_dict()
  else: # artifact is a dict
  # imported/legacy artifacts don't have metadata,spec,status fields
- key_field = (
- "key"
- if _is_imported_artifact(artifact)
- or mlrun.utils.is_legacy_artifact(artifact)
- else "metadata.key"
- )
+ key_field = "key" if _is_imported_artifact(artifact) else "metadata.key"
  key = mlrun.utils.get_in(artifact, key_field, "")
  if not key:
  raise ValueError(f'artifacts "{key_field}" must be specified')
@@ -1889,11 +1885,11 @@ class MlrunProject(ModelObj):
  def log_llm_prompt(
  self,
  key,
- prompt_string: Optional[str] = None,
+ prompt_template: Optional[list[dict]] = None,
  prompt_path: Optional[str] = None,
  prompt_legend: Optional[dict] = None,
  model_artifact: Union[ModelArtifact, str] = None,
- model_configuration: Optional[dict] = None,
+ invocation_config: Optional[dict] = None,
  description: Optional[str] = None,
  target_path: Optional[str] = None,
  artifact_path: Optional[str] = None,
@@ -1913,27 +1909,76 @@ class MlrunProject(ModelObj):

  Examples::

+ # Log directly with an inline prompt template
+ project.log_llm_prompt(
+ key="customer_support_prompt",
+ prompt_template=[
+ {
+ "role": "system",
+ "content": "You are a helpful customer support assistant.",
+ },
+ {
+ "role": "user",
+ "content": "The customer reports: {issue_description}",
+ },
+ ],
+ prompt_legend={
+ "issue_description": {
+ "field": "user_issue",
+ "description": "Detailed description of the customer's issue",
+ },
+ "solution": {
+ "field": "proposed_solution",
+ "description": "Suggested fix for the customer's issue",
+ },
+ },
+ model_artifact=model,
+ invocation_config={"temperature": 0.5, "max_tokens": 200},
+ description="Prompt for handling customer support queries",
+ tag="support-v1",
+ labels={"domain": "support"},
+ )
+
  # Log a prompt from file
  project.log_llm_prompt(
- key="qa-prompt",
- prompt_path="prompts/qa_template.txt",
- prompt_legend={"question": "user_question"},
+ key="qa_prompt",
+ prompt_path="prompts/template.json",
+ prompt_legend={
+ "question": {
+ "field": "user_question",
+ "description": "The actual question asked by the user",
+ }
+ },
  model_artifact=model,
+ invocation_config={"temperature": 0.7, "max_tokens": 256},
+ description="Q&A prompt template with user-provided question",
  tag="v2",
+ labels={"task": "qa", "stage": "experiment"},
  )

  :param key: Unique key for the prompt artifact.
- :param prompt_string: Raw prompt text. Mutually exclusive with `prompt_path`.
- :param prompt_path: Path to a file containing the prompt. Mutually exclusive with `prompt_string`.
+ :param prompt_template: Raw prompt list of dicts -
+ [{"role": "system", "content": "You are a {profession} advisor"},
+ "role": "user", "content": "I need your help with {profession}"]. only "role" and "content" keys allow in any
+ str format (upper/lower case), keys will be modified to lower case.
+ Cannot be used with `prompt_path`.
+ :param prompt_path: Path to a JSON file containing the prompt template.
+ Cannot be used together with `prompt_template`.
+ The file should define a list of dictionaries in the same format
+ supported by `prompt_template`.
  :param prompt_legend: A dictionary where each key is a placeholder in the prompt (e.g., ``{user_name}``)
- and the value is a description or explanation of what that placeholder represents.
+ and the value is a dictionary holding two keys, "field", "description". "field" points to the field in
+ the event where the value of the place-holder inside the event, if None or not exist will be replaced
+ with the place-holder name. "description" will point to explanation of what that placeholder represents.
  Useful for documenting and clarifying dynamic parts of the prompt.
  :param model_artifact: Reference to the parent model (either `ModelArtifact` or model URI string).
- :param model_configuration: Configuration dictionary for model generation parameters
+ :param invocation_config: Configuration dictionary for model generation parameters
  (e.g., temperature, max tokens).
- :param description: Optional description of the prompt.
- :param target_path: Optional local target path for saving prompt content.
- :param artifact_path: Storage path for the logged artifact.
+ :param description: Optional description of the prompt.
+ :param target_path: Absolute target path (instead of using artifact_path + local_path)
+ :param artifact_path: Target artifact path (when not using the default)
+ To define a subpath under the default location use:
+ `artifact_path=context.artifact_subpath('data')`
  :param tag: Version tag for the artifact (e.g., "v1", "latest").
  :param labels: Labels to tag the artifact for filtering and organization.
  :param upload: Whether to upload the artifact to a remote datastore. Defaults to True.
@@ -1942,19 +1987,19 @@ class MlrunProject(ModelObj):
  :returns: The logged `LLMPromptArtifact` object.
  """

- if not prompt_string and not prompt_path:
+ if not prompt_template and not prompt_path:
  raise mlrun.errors.MLRunInvalidArgumentError(
- "Either 'prompt_string' or 'prompt_path' must be provided"
+ "Either 'prompt_template' or 'prompt_path' must be provided"
  )

  llm_prompt = LLMPromptArtifact(
  key=key,
  project=self.name,
- prompt_string=prompt_string,
+ prompt_template=prompt_template,
  prompt_path=prompt_path,
  prompt_legend=prompt_legend,
  model_artifact=model_artifact,
- model_configuration=model_configuration,
+ invocation_config=invocation_config,
  target_path=target_path,
  description=description,
  **kwargs,
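
Note: a minimal sketch of the renamed arguments above (prompt_string is now prompt_template, model_configuration is now invocation_config); the key, template and values below are illustrative, not taken from the diff:

    # Minimal call with the renamed parameters; passing neither prompt_template
    # nor prompt_path raises MLRunInvalidArgumentError, per the check above.
    prompt = project.log_llm_prompt(
        key="summarizer_prompt",  # illustrative key
        prompt_template=[{"role": "user", "content": "Summarize: {document}"}],
        prompt_legend={
            "document": {"field": "doc_text", "description": "Input document text"}
        },
        invocation_config={"temperature": 0.2},
        tag="v1",
    )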
@@ -2342,8 +2387,9 @@ class MlrunProject(ModelObj):
  handler: Optional[str] = None,
  with_repo: Optional[bool] = None,
  tag: Optional[str] = None,
- requirements: Optional[typing.Union[str, list[str]]] = None,
+ requirements: Optional[list[str]] = None,
  requirements_file: str = "",
+ local_path: Optional[str] = None,
  **application_kwargs,
  ) -> mlrun.runtimes.RemoteRuntime:
  """
@@ -2358,7 +2404,8 @@ class MlrunProject(ModelObj):
  )

  :param func: Remote function object or spec/code URL. :code:`None` refers to the current
- notebook.
+ notebook. May also be a hub URL of a module of kind model-monitoring-app in the
+ format: hub://[{source}/]{name}[:{tag}].
  :param name: Name of the function (under the project), can be specified with a tag to support
  versions (e.g. myfunc:v1).
  :param image: Docker image to be used, can also be specified in
@@ -2373,6 +2420,8 @@ class MlrunProject(ModelObj):
  :param application_class: Name or an Instance of a class that implements the monitoring application.
  :param application_kwargs: Additional keyword arguments to be passed to the
  monitoring application's constructor.
+ :param local_path: Path to a local directory to save the downloaded monitoring-app code files in,
+ in case 'func' is a hub URL (defaults to current working directory).
  :returns: The model monitoring remote function object.
  """
  (
@@ -2389,6 +2438,7 @@ class MlrunProject(ModelObj):
  tag,
  requirements,
  requirements_file,
+ local_path,
  **application_kwargs,
  )
  # save to project spec
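
Note: a hedged sketch of the new hub-URL path for monitoring apps; the module name, image and directory below are illustrative assumptions, only the hub://[{source}/]{name}[:{tag}] format and the local_path parameter come from the docstring above:

    # Register a monitoring app whose code is downloaded from the function hub;
    # local_path controls where the downloaded module files are saved.
    fn = project.set_model_monitoring_function(
        func="hub://my-monitoring-app:latest",  # hypothetical hub module of kind model-monitoring-app
        name="my-monitoring-app",
        image="mlrun/mlrun",
        local_path="./monitoring_apps",  # defaults to the current working directory
    )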
@@ -2467,8 +2517,9 @@ class MlrunProject(ModelObj):
  handler: typing.Optional[str] = None,
  with_repo: typing.Optional[bool] = None,
  tag: typing.Optional[str] = None,
- requirements: typing.Union[str, list[str], None] = None,
+ requirements: typing.Union[list[str], None] = None,
  requirements_file: str = "",
+ local_path: typing.Optional[str] = None,
  **application_kwargs,
  ) -> tuple[str, mlrun.runtimes.RemoteRuntime, dict]:
  import mlrun.model_monitoring.api
@@ -2485,6 +2536,7 @@ class MlrunProject(ModelObj):
  tag=tag,
  requirements=requirements,
  requirements_file=requirements_file,
+ local_path=local_path,
  **application_kwargs,
  )
  elif isinstance(func, str) and isinstance(handler, str):
@@ -2530,7 +2582,7 @@ class MlrunProject(ModelObj):
  *,
  deploy_histogram_data_drift_app: bool = True,
  wait_for_deployment: bool = False,
- fetch_credentials_from_sys_config: bool = False,
+ fetch_credentials_from_sys_config: bool = False, # deprecated
  ) -> None:
  """
  Deploy model monitoring application controller, writer and stream functions.
@@ -2565,14 +2617,20 @@ class MlrunProject(ModelObj):
  :param wait_for_deployment: If true, return only after the deployment is done on the backend.
  Otherwise, deploy the model monitoring infrastructure on the
  background, including the histogram data drift app if selected.
- :param fetch_credentials_from_sys_config: If true, fetch the credentials from the system configuration.
+ :param fetch_credentials_from_sys_config: Deprecated. If true, fetch the credentials from the project
+ configuration.
  """
+ if fetch_credentials_from_sys_config:
+ warnings.warn(
+ "`fetch_credentials_from_sys_config` is deprecated in 1.10.0 and will be removed in 1.12.0.",
+ # TODO: Remove this in 1.12.0
+ FutureWarning,
+ )
  if base_period < 10:
  logger.warn(
  "enable_model_monitoring: 'base_period' < 10 minutes is not supported in production environments",
  project=self.name,
  )
-
  db = mlrun.db.get_run_db(secrets=self._secrets)
  db.enable_model_monitoring(
  project=self.name,
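
Note: a short sketch of enabling monitoring without the deprecated flag; passing fetch_credentials_from_sys_config=True now only emits a FutureWarning:

    # base_period values below 10 minutes trigger the warning shown above.
    project.enable_model_monitoring(
        base_period=10,
        deploy_histogram_data_drift_app=True,
        wait_for_deployment=True,  # block until the backend finishes the deployment
    )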
@@ -2705,16 +2763,18 @@ class MlrunProject(ModelObj):
  | Creating a function with non project source is done by specifying a module ``handler`` and on the
  returned function set the source with ``function.with_source_archive(<source>)``.

- Support URL prefixes:
+ Supported URL prefixes:

- | Object (s3://, v3io://, ..)
- | MLRun DB e.g. db://project/func:ver
- | Functions hub/market: e.g. hub://auto-trainer:master
+ - Object: s3://, v3io://, etc.
+ - MLRun DB: e.g db://project/func:ver
+ - Function hub/market: e.g. hub://auto-trainer:master

  Examples::

  proj.set_function(func_object)
- proj.set_function("http://.../mynb.ipynb", "train")
+ proj.set_function(
+ "http://.../mynb.ipynb", "train", kind="job", image="mlrun/mlrun"
+ )
  proj.set_function("./func.yaml")
  proj.set_function("hub://get_toy_data", "getdata")

@@ -2741,18 +2801,6 @@ class MlrunProject(ModelObj):
  # By providing a path to a pip requirements file
  proj.set_function("my.py", requirements="requirements.txt")

- One of the most important parameters is 'kind', used to specify the chosen runtime. The options are:
- - local: execute a local python or shell script
- - job: insert the code into a Kubernetes pod and execute it
- - nuclio: insert the code into a real-time serverless nuclio function
- - serving: insert code into orchestrated nuclio function(s) forming a DAG
- - dask: run the specified python code / script as Dask Distributed job
- - mpijob: run distributed Horovod jobs over the MPI job operator
- - spark: run distributed Spark job using Spark Kubernetes Operator
- - remote-spark: run distributed Spark job on remote Spark service
- - databricks: run code on Databricks cluster (python scripts, Spark etc.)
- - application: run a long living application (e.g. a web server, UI, etc.)
-
  Learn more about :doc:`../../concepts/functions-overview`.

  :param func: Function object or spec/code url, None refers to current Notebook
@@ -2760,8 +2808,20 @@ class MlrunProject(ModelObj):
  Versions (e.g. myfunc:v1). If the `tag` parameter is provided, the tag in the name
  must match the tag parameter.
  Specifying a tag in the name will update the project's tagged function (myfunc:v1)
- :param kind: Runtime kind e.g. job, nuclio, spark, dask, mpijob
- Default: job
+ :param kind: Default: job. One of
+
+ - local: execute a local python or shell script
+ - job: insert the code into a Kubernetes pod and execute it
+ - nuclio: insert the code into a real-time serverless nuclio function
+ - serving: insert code into orchestrated nuclio function(s) forming a DAG
+ - dask: run the specified python code / script as Dask Distributed job
+ - mpijob: run distributed Horovod jobs over the MPI job operator
+ - spark: run distributed Spark job using Spark Kubernetes Operator
+ - remote-spark: run distributed Spark job on remote Spark service
+ - databricks: run code on Databricks cluster (python scripts, Spark etc.)
+ - application: run a long living application (e.g. a web server, UI, etc.)
+ - handler: execute a python handler (used automatically in notebooks or for debug)
+
  :param image: Docker image to be used, can also be specified in the function object/yaml
  :param handler: Default function handler to invoke (can only be set with .py/.ipynb files)
  :param with_repo: Add (clone) the current repo to the build source - use when the function code is in
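
Note: a brief sketch tying the relocated kind list to the examples above; the file paths and names are illustrative:

    # kind selects the runtime; "job" is the default when omitted.
    project.set_function("src/train.py", name="train", kind="job", image="mlrun/mlrun")
    project.set_function("src/serve.py", name="serve", kind="serving", image="mlrun/mlrun")
    project.set_function("hub://auto-trainer:master", name="auto-trainer")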
@@ -3359,7 +3419,12 @@ class MlrunProject(ModelObj):
  self._initialized = True
  return self.spec._function_objects

- def with_secrets(self, kind, source, prefix=""):
+ def with_secrets(
+ self,
+ kind,
+ source,
+ prefix="",
+ ):
  """register a secrets source (file, env or dict)

  read secrets from a source provider to be used in workflows, example::
@@ -3381,12 +3446,19 @@ class MlrunProject(ModelObj):

  This will enable access to all secrets in vault registered to the current project.

- :param kind: secret type (file, inline, env, vault)
+ :param kind: secret type (file, inline, env, vault, azure_vault)
  :param source: secret data or link (see example)
  :param prefix: add a prefix to the keys in this source

  :returns: project object
  """
+ # Block using mlrun-auth-secrets.* via azure_vault's k8s_secret param (client-side only)
+ if kind == "azure_vault" and isinstance(source, dict):
+ candidate_secret_name = (source.get("k8s_secret") or "").strip()
+ if candidate_secret_name:
+ mlrun.common.secrets.validate_not_forbidden_secret(
+ candidate_secret_name
+ )

  if kind == "vault" and isinstance(source, list):
  source = {"project": self.metadata.name, "secrets": source}
@@ -3770,7 +3842,7 @@ class MlrunProject(ModelObj):

  import mlrun
  from mlrun.datastore.datastore_profile import (
- DatastoreProfileKafkaSource,
+ DatastoreProfileKafkaStream,
  DatastoreProfileTDEngine,
  )

@@ -3787,7 +3859,7 @@ class MlrunProject(ModelObj):
  project.register_datastore_profile(tsdb_profile)

  # Create and register stream profile
- stream_profile = DatastoreProfileKafkaSource(
+ stream_profile = DatastoreProfileKafkaStream(
  name="my-kafka",
  brokers=["<kafka-broker-ip-address>:9094"],
  topics=[], # Keep the topics list empty
@@ -3829,9 +3901,9 @@ class MlrunProject(ModelObj):

  .. code-block:: python

- from mlrun.datastore.datastore_profile import DatastoreProfileKafkaSource
+ from mlrun.datastore.datastore_profile import DatastoreProfileKafkaStream

- stream_profile = DatastoreProfileKafkaSource(
+ stream_profile = DatastoreProfileKafkaStream(
  name="confluent-kafka",
  brokers=["<server-domain-start>.confluent.cloud:9092"],
  topics=[],
@@ -3860,7 +3932,7 @@ class MlrunProject(ModelObj):
  The supported profiles are:

  * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileV3io`
- * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaSource`
+ * :py:class:`~mlrun.datastore.datastore_profile.DatastoreProfileKafkaStream`

  You need to register one of them, and pass the profile's name.
  :param replace_creds: If ``True`` - override the existing credentials.
@@ -3900,6 +3972,9 @@ class MlrunProject(ModelObj):
  start: Optional[datetime.datetime] = None,
  end: Optional[datetime.datetime] = None,
  top_level: bool = False,
+ modes: Optional[
+ Union[mm_constants.EndpointMode, list[mm_constants.EndpointMode]]
+ ] = None,
  uids: Optional[list[str]] = None,
  latest_only: bool = False,
  tsdb_metrics: bool = False,
@@ -3915,8 +3990,9 @@ class MlrunProject(ModelObj):
  5) function_tag
  6) labels
  7) top level
- 8) uids
- 9) start and end time, corresponding to the `created` field.
+ 8) modes
+ 9) uids
+ 10) start and end time, corresponding to the `created` field.
  By default, when no filters are applied, all available endpoints for the given project will be listed.

  In addition, this functions provides a facade for listing endpoint related metrics. This facade is time-based
@@ -3936,6 +4012,8 @@ class MlrunProject(ModelObj):
  :param start: The start time to filter by.Corresponding to the `created` field.
  :param end: The end time to filter by. Corresponding to the `created` field.
  :param top_level: If true will return only routers and endpoint that are NOT children of any router.
+ :param modes: Specifies the mode of the model endpoint. Can be "real-time" (0), "batch" (1),
+ "batch_legacy" (2). If set to None, all are included.
  :param uids: If passed will return a list `ModelEndpoint` object with uid in uids.
  :param tsdb_metrics: When True, the time series metrics will be added to the output
  of the resulting.
@@ -3957,6 +4035,7 @@ class MlrunProject(ModelObj):
  start=start,
  end=end,
  top_level=top_level,
+ modes=modes,
  uids=uids,
  latest_only=latest_only,
  tsdb_metrics=tsdb_metrics,
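
Note: a hedged sketch of the new modes filter; the listing method name and the EndpointMode member name are assumptions (the diff only shows the parameters and the values "real-time" (0), "batch" (1) and "batch_legacy" (2)):

    import mlrun.common.schemas.model_monitoring.constants as mm_constants

    # Assumed method name (list_model_endpoints) and member name (REAL_TIME).
    endpoints = project.list_model_endpoints(
        top_level=True,
        modes=mm_constants.EndpointMode.REAL_TIME,
        tsdb_metrics=True,
    )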
@@ -4051,7 +4130,12 @@ class MlrunProject(ModelObj):
  This ensures latest code changes are executed. This argument must be used in
  conjunction with the local=True argument.
  :param output_path: path to store artifacts, when running in a workflow this will be set automatically
- :param retry: Retry configuration for the run, can be a dict or an instance of mlrun.model.Retry.
+ :param retry: Retry configuration for the run, can be a dict or an instance of
+ :py:class:`~mlrun.model.Retry`.
+ The `count` field in the `Retry` object specifies the number of retry attempts.
+ If `count=0`, the run will not be retried.
+ The `backoff` field specifies the retry backoff strategy between retry attempts.
+ If not provided, the default backoff delay is 30 seconds.
  :return: MLRun RunObject or PipelineNodeWrapper
  """
  if artifact_path:
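
Note: a sketch of the retry option documented above, assuming it belongs to run_function; the dict form mirrors the documented Retry fields, and treating backoff as seconds is an assumption based on the 30-second default mentioned in the docstring:

    run = project.run_function(
        "train",
        retry={"count": 3, "backoff": 30},  # 3 attempts, 30s between attempts (assumed unit)
    )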
@@ -5072,7 +5156,6 @@ class MlrunProject(ModelObj):
  :param states: List only runs whose state is one of the provided states.
  :param sort: Whether to sort the result according to their start time. Otherwise, results will be
  returned by their internal order in the DB (order will not be guaranteed).
- :param last: Deprecated - currently not used (will be removed in 1.10.0).
  :param iter: If ``True`` return runs from all iterations. Otherwise, return only runs whose ``iter`` is 0.
  :param start_time_from: Filter by run start time in ``[start_time_from, start_time_to]``.
  :param start_time_to: Filter by run start time in ``[start_time_from, start_time_to]``.
@@ -5551,6 +5634,31 @@ class MlrunProject(ModelObj):
  **kwargs,
  )

+ def get_drift_over_time(
+ self,
+ start: Optional[datetime.datetime] = None,
+ end: Optional[datetime.datetime] = None,
+ ) -> mlrun.common.schemas.model_monitoring.ModelEndpointDriftValues:
+ """
+ Get drift counts over time for the project.
+
+ This method returns a list of tuples, each representing a time-interval (in a granularity set by the
+ duration of the given time range) and the number of suspected drifts and detected drifts in that interval.
+ For a range of 6 hours or less, the granularity is 10 minute, for a range of 2 hours to 72 hours, the
+ granularity is 1 hour, and for a range of more than 72 hours, the granularity is 24 hours.
+
+ :param start: Start time of the range to retrieve drift counts from.
+ :param end: End time of the range to retrieve drift counts from.
+
+ :return: A ModelEndpointDriftValues object containing the drift counts over time.
+ """
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ return db.get_drift_over_time(
+ project=self.metadata.name,
+ start=start,
+ end=end,
+ )
+
  def _run_authenticated_git_action(
  self,
  action: Callable,
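
Note: a usage sketch for the new get_drift_over_time method added above; the time window is illustrative:

    import datetime

    now = datetime.datetime.now(tz=datetime.timezone.utc)
    drift = project.get_drift_over_time(
        start=now - datetime.timedelta(hours=6),  # a 6-hour window yields 10-minute intervals per the docstring
        end=now,
    )
    # drift is a ModelEndpointDriftValues object with per-interval suspected/detected drift counts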