mlrun 1.8.0rc4__py3-none-any.whl → 1.8.0rc7__py3-none-any.whl

This diff compares the contents of publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (75)
  1. mlrun/__init__.py +5 -3
  2. mlrun/alerts/alert.py +129 -2
  3. mlrun/artifacts/__init__.py +1 -1
  4. mlrun/artifacts/base.py +12 -1
  5. mlrun/artifacts/document.py +59 -38
  6. mlrun/common/constants.py +1 -0
  7. mlrun/common/model_monitoring/__init__.py +0 -2
  8. mlrun/common/model_monitoring/helpers.py +0 -28
  9. mlrun/common/schemas/__init__.py +2 -4
  10. mlrun/common/schemas/alert.py +80 -1
  11. mlrun/common/schemas/artifact.py +4 -0
  12. mlrun/common/schemas/client_spec.py +0 -1
  13. mlrun/common/schemas/model_monitoring/__init__.py +0 -6
  14. mlrun/common/schemas/model_monitoring/constants.py +11 -9
  15. mlrun/common/schemas/model_monitoring/model_endpoints.py +77 -149
  16. mlrun/common/schemas/notification.py +6 -0
  17. mlrun/common/schemas/project.py +3 -0
  18. mlrun/config.py +2 -3
  19. mlrun/datastore/datastore_profile.py +57 -17
  20. mlrun/datastore/sources.py +1 -2
  21. mlrun/datastore/vectorstore.py +67 -59
  22. mlrun/db/base.py +29 -19
  23. mlrun/db/factory.py +0 -3
  24. mlrun/db/httpdb.py +224 -161
  25. mlrun/db/nopdb.py +36 -17
  26. mlrun/execution.py +46 -32
  27. mlrun/feature_store/api.py +1 -0
  28. mlrun/model.py +7 -0
  29. mlrun/model_monitoring/__init__.py +3 -2
  30. mlrun/model_monitoring/api.py +55 -53
  31. mlrun/model_monitoring/applications/_application_steps.py +4 -2
  32. mlrun/model_monitoring/applications/base.py +165 -6
  33. mlrun/model_monitoring/applications/context.py +88 -37
  34. mlrun/model_monitoring/applications/evidently_base.py +0 -1
  35. mlrun/model_monitoring/applications/histogram_data_drift.py +3 -7
  36. mlrun/model_monitoring/controller.py +43 -37
  37. mlrun/model_monitoring/db/__init__.py +0 -2
  38. mlrun/model_monitoring/db/tsdb/base.py +2 -1
  39. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +2 -1
  40. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +43 -0
  41. mlrun/model_monitoring/helpers.py +79 -66
  42. mlrun/model_monitoring/stream_processing.py +83 -270
  43. mlrun/model_monitoring/writer.py +1 -10
  44. mlrun/projects/pipelines.py +37 -1
  45. mlrun/projects/project.py +171 -74
  46. mlrun/run.py +40 -0
  47. mlrun/runtimes/nuclio/function.py +7 -6
  48. mlrun/runtimes/nuclio/serving.py +9 -2
  49. mlrun/serving/routers.py +158 -145
  50. mlrun/serving/server.py +6 -0
  51. mlrun/serving/states.py +21 -7
  52. mlrun/serving/v2_serving.py +70 -61
  53. mlrun/utils/helpers.py +14 -30
  54. mlrun/utils/notifications/notification/mail.py +36 -9
  55. mlrun/utils/notifications/notification_pusher.py +43 -18
  56. mlrun/utils/version/version.json +2 -2
  57. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/METADATA +5 -4
  58. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/RECORD +62 -75
  59. mlrun/common/schemas/model_monitoring/model_endpoint_v2.py +0 -149
  60. mlrun/model_monitoring/db/stores/__init__.py +0 -136
  61. mlrun/model_monitoring/db/stores/base/__init__.py +0 -15
  62. mlrun/model_monitoring/db/stores/base/store.py +0 -154
  63. mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
  64. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -46
  65. mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -93
  66. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -47
  67. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -25
  68. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -408
  69. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
  70. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -464
  71. mlrun/model_monitoring/model_endpoint.py +0 -120
  72. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/LICENSE +0 -0
  73. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/WHEEL +0 -0
  74. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/entry_points.txt +0 -0
  75. {mlrun-1.8.0rc4.dist-info → mlrun-1.8.0rc7.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -28,7 +28,7 @@ import warnings
 import zipfile
 from copy import deepcopy
 from os import environ, makedirs, path
-from typing import Callable, Optional, Union
+from typing import Callable, Optional, Union, cast
 
 import dotenv
 import git
@@ -59,13 +59,17 @@ import mlrun.utils
 import mlrun.utils.regex
 import mlrun_pipelines.common.models
 from mlrun.alerts.alert import AlertConfig
+from mlrun.common.schemas import alert as alert_constants
 from mlrun.datastore.datastore_profile import (
     DatastoreProfile,
     DatastoreProfile2Json,
-    VectorStoreProfile,
     datastore_profile_read,
 )
 from mlrun.datastore.vectorstore import VectorStoreCollection
+from mlrun.model_monitoring.helpers import (
+    filter_results_by_regex,
+    get_result_instance_fqn,
+)
 from mlrun.runtimes.nuclio.function import RemoteRuntime
 from mlrun_pipelines.models import PipelineNodeWrapper
 
@@ -1535,7 +1539,9 @@ class MlrunProject(ModelObj):
 
     def update_artifact(self, artifact_object: Artifact):
         artifacts_manager = self._get_artifact_manager()
-        artifacts_manager.update_artifact(artifact_object, artifact_object)
+        project_tag = self._get_project_tag()
+        producer, _ = self._resolve_artifact_producer(artifact_object, project_tag)
+        artifacts_manager.update_artifact(producer, artifact_object)
 
     def _get_artifact_manager(self):
         if self._artifact_manager:
@@ -1732,7 +1738,7 @@ class MlrunProject(ModelObj):
         :param upload:        upload to datastore (default is True)
         :param labels:        a set of key/value labels to tag the artifact with
 
-        :returns: artifact object
+        :returns: dataset artifact object
         """
         ds = DatasetArtifact(
             key,
@@ -1745,14 +1751,17 @@
             **kwargs,
         )
 
-        item = self.log_artifact(
-            ds,
-            local_path=local_path,
-            artifact_path=artifact_path,
-            target_path=target_path,
-            tag=tag,
-            upload=upload,
-            labels=labels,
+        item = cast(
+            DatasetArtifact,
+            self.log_artifact(
+                ds,
+                local_path=local_path,
+                artifact_path=artifact_path,
+                target_path=target_path,
+                tag=tag,
+                upload=upload,
+                labels=labels,
+            ),
         )
         return item
 
@@ -1820,7 +1829,7 @@
         :param extra_data:    key/value list of extra files/charts to link with this dataset
                               value can be absolute path | relative path (to model dir) | bytes | artifact object
 
-        :returns: artifact object
+        :returns: model artifact object
         """
 
         if training_set is not None and inputs:
@@ -1847,79 +1856,72 @@
         if training_set is not None:
             model.infer_from_df(training_set, label_column)
 
-        item = self.log_artifact(
-            model,
-            artifact_path=artifact_path,
-            tag=tag,
-            upload=upload,
-            labels=labels,
+        item = cast(
+            ModelArtifact,
+            self.log_artifact(
+                model,
+                artifact_path=artifact_path,
+                tag=tag,
+                upload=upload,
+                labels=labels,
+            ),
         )
         return item
 
-    def get_or_create_vector_store_collection(
+    def get_vector_store_collection(
         self,
         collection_name: str,
-        profile: Union[str, VectorStoreProfile],
-        **kwargs,
+        vector_store: "VectorStore",  # noqa: F821
     ) -> VectorStoreCollection:
-        """
-        Create or retrieve a VectorStoreCollection.
-
-        :param collection_name: Name of the collection
-        :param profile: Name of the VectorStoreProfile or a VectorStoreProfile object
-        :param kwargs: Additional arguments for the VectorStoreCollection
-        :return: VectorStoreCollection object
-        """
-        if isinstance(profile, str):
-            profile = datastore_profile_read(f"ds://{profile}")
-
-        if not isinstance(profile, VectorStoreProfile):
-            raise ValueError(
-                "Profile must be a VectorStoreProfile object or a profile name"
-            )
         return VectorStoreCollection(
-            profile.vector_store_class,
             self,
-            profile.name,
             collection_name,
-            **profile.attributes(kwargs),
+            vector_store,
         )
 
     def log_document(
         self,
         key: str,
-        artifact_path: Optional[str] = None,
-        document_loader: DocumentLoaderSpec = DocumentLoaderSpec(),
         tag: str = "",
+        local_path: str = "",
+        artifact_path: Optional[str] = None,
+        document_loader_spec: Optional[DocumentLoaderSpec] = None,
         upload: Optional[bool] = False,
         labels: Optional[dict[str, str]] = None,
+        target_path: Optional[str] = None,
         **kwargs,
     ) -> DocumentArtifact:
         """
         Log a document as an artifact.
 
         :param key: Artifact key
-        :param target_path: Path to the local file
-        :param artifact_path: Target path for artifact storage
-        :param document_loader: Spec to use to load the artifact as langchain document
         :param tag: Version tag
+        :param local_path: path to the local file we upload, will also be use
+            as the destination subpath (under "artifact_path")
+        :param artifact_path: Target path for artifact storage
+        :param document_loader_spec: Spec to use to load the artifact as langchain document
         :param upload: Whether to upload the artifact
         :param labels: Key-value labels
+        :param target_path: Target file path
        :param kwargs: Additional keyword arguments
        :return: DocumentArtifact object
        """
        doc_artifact = DocumentArtifact(
            key=key,
-            document_loader=document_loader,
+            original_source=local_path or target_path,
+            document_loader_spec=document_loader_spec
+            if document_loader_spec
+            else DocumentLoaderSpec(),
            **kwargs,
        )
-
        return self.log_artifact(
-            doc_artifact,
-            artifact_path=artifact_path,
+            item=doc_artifact,
            tag=tag,
+            local_path=local_path,
+            artifact_path=artifact_path,
            upload=upload,
            labels=labels,
+            target_path=target_path,
        )
 
     def import_artifact(
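
The two hunks above replace `get_or_create_vector_store_collection` (which resolved a `VectorStoreProfile`) with `get_vector_store_collection`, which now accepts a ready-made `VectorStore` object, and rework `log_document` around `local_path`/`target_path` and an optional `document_loader_spec`. A minimal usage sketch, assuming a LangChain Chroma store and a local `docs/faq.txt` file (both illustrative assumptions, not taken from the diff):

import mlrun
from langchain_community.vectorstores import Chroma  # any LangChain VectorStore

project = mlrun.get_or_create_project("my-project")

# The collection now wraps a VectorStore instance you construct yourself,
# instead of being built from a VectorStoreProfile.
collection = project.get_vector_store_collection(
    collection_name="faq",
    vector_store=Chroma(collection_name="faq"),
)

# log_document now takes local_path (upload source and destination subpath)
# and an optional DocumentLoaderSpec instead of a positional document_loader.
doc_artifact = project.log_document(
    key="faq-doc",
    local_path="docs/faq.txt",
    upload=True,
)
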
@@ -2034,6 +2036,85 @@ class MlrunProject(ModelObj):
         )
         return _run_project_setup(self, setup_file_path, save)
 
+    def create_model_monitoring_alert_configs(
+        self,
+        name: str,
+        summary: str,
+        endpoints: mlrun.common.schemas.ModelEndpointList,
+        events: Union[list[alert_constants.EventKind], alert_constants.EventKind],
+        notifications: list[alert_constants.AlertNotification],
+        result_names: Optional[
+            list[str]
+        ] = None,  # can use wildcards - see below for explanation.
+        severity: alert_constants.AlertSeverity = alert_constants.AlertSeverity.MEDIUM,
+        criteria: alert_constants.AlertCriteria = alert_constants.AlertCriteria(
+            count=1, period="10m"
+        ),
+        reset_policy: mlrun.common.schemas.alert.ResetPolicy = mlrun.common.schemas.alert.ResetPolicy.AUTO,
+    ) -> list[mlrun.alerts.alert.AlertConfig]:
+        """
+        :param name:          AlertConfig name.
+        :param summary:       Summary of the alert, will be sent in the generated notifications
+        :param endpoints:     The endpoints from which to retrieve the metrics that the
+                              alerts will be based on.
+        :param events:        AlertTrigger event types (EventKind).
+        :param notifications: List of notifications to invoke once the alert is triggered
+        :param result_names:  Optional. Filters the result names used to create the alert configuration,
+                              constructed from the app and result_name regex.
+
+                              For example:
+                              [`app1.result-*`, `*.result1`]
+                              will match "mep1.app1.result.result-1" and "mep1.app2.result.result1".
+        :param severity:      Severity of the alert.
+        :param criteria:      When the alert will be triggered based on the
+                              specified number of events within the defined time period.
+        :param reset_policy:  When to clear the alert. May be "manual" for manual reset of the alert,
+                              or "auto" if the criteria contains a time period.
+        :returns:             List of AlertConfig according to endpoints results,
+                              filtered by result_names.
+        """
+        db = mlrun.db.get_run_db(secrets=self._secrets)
+        matching_results = []
+        alerts = []
+        # TODO: Refactor to use a single request to improve performance at scale, ML-8473
+        for endpoint in endpoints.endpoints:
+            results_by_endpoint = db.get_model_endpoint_monitoring_metrics(
+                project=self.name, endpoint_id=endpoint.metadata.uid, type="results"
+            )
+            results_fqn_by_endpoint = [
+                get_result_instance_fqn(
+                    model_endpoint_id=endpoint.metadata.uid,
+                    app_name=result.app,
+                    result_name=result.name,
+                )
+                for result in results_by_endpoint
+            ]
+            matching_results += filter_results_by_regex(
+                existing_result_names=results_fqn_by_endpoint,
+                result_name_filters=result_names,
+            )
+        for result_fqn in matching_results:
+            alerts.append(
+                mlrun.alerts.alert.AlertConfig(
+                    project=self.name,
+                    name=name,
+                    summary=summary,
+                    severity=severity,
+                    entities=alert_constants.EventEntities(
+                        kind=alert_constants.EventEntityKind.MODEL_ENDPOINT_RESULT,
+                        project=self.name,
+                        ids=[result_fqn],
+                    ),
+                    trigger=alert_constants.AlertTrigger(
+                        events=events if isinstance(events, list) else [events]
+                    ),
+                    criteria=criteria,
+                    notifications=notifications,
+                    reset_policy=reset_policy,
+                )
+            )
+        return alerts
+
     def set_model_monitoring_function(
         self,
         func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
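
The new `create_model_monitoring_alert_configs` method above expands endpoint results into per-result `AlertConfig` objects. A minimal sketch of how it might be called, assuming the project already has monitored endpoints and a Slack notification target (the event kind, wildcard filter, and notification details are illustrative assumptions):

import mlrun
from mlrun.common.schemas import alert as alert_constants

project = mlrun.get_or_create_project("my-project")
endpoints = project.list_model_endpoints()  # now returns a ModelEndpointList

alert_configs = project.create_model_monitoring_alert_configs(
    name="drift-alert",
    summary="Data drift detected",
    endpoints=endpoints,
    events=alert_constants.EventKind.DATA_DRIFT_DETECTED,  # assumed event kind
    notifications=[
        alert_constants.AlertNotification(
            notification=mlrun.common.schemas.Notification(
                kind="slack",
                name="drift-alerts",
                secret_params={"webhook": "https://hooks.slack.com/..."},  # placeholder
            )
        )
    ],
    result_names=["histogram-data-drift.*"],  # wildcard filter, per the docstring above
)
# The returned configs still need to be stored to take effect (assumed workflow).
for config in alert_configs:
    project.store_alert_config(config)
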
@@ -2408,7 +2489,7 @@ class MlrunProject(ModelObj):
 
     def set_function(
         self,
-        func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
+        func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
         name: str = "",
         kind: str = "job",
         image: Optional[str] = None,
@@ -3407,7 +3488,6 @@
     def set_model_monitoring_credentials(
         self,
         access_key: Optional[str] = None,
-        endpoint_store_connection: Optional[str] = None,
         stream_path: Optional[str] = None,
         tsdb_connection: Optional[str] = None,
         replace_creds: bool = False,
@@ -3418,7 +3498,6 @@
            model monitoring or serving function.
 
        :param access_key:                Model monitoring access key for managing user permissions.
-        :param endpoint_store_connection: Endpoint store connection string. By default, None. Options:
 
                                          * None - will be set from the system configuration.
                                          * v3io - for v3io endpoint store, pass `v3io` and the system will generate the
@@ -3451,7 +3530,6 @@
             project=self.name,
             credentials={
                 "access_key": access_key,
-                "endpoint_store_connection": endpoint_store_connection,
                 "stream_path": stream_path,
                 "tsdb_connection": tsdb_connection,
             },
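
With `endpoint_store_connection` removed above, configuring model monitoring credentials narrows to the access key, stream path, and TSDB connection. A minimal sketch (the `v3io` values are illustrative, not mandated by the diff):

import mlrun

project = mlrun.get_or_create_project("my-project")
project.set_model_monitoring_credentials(
    stream_path="v3io",      # stream target for monitoring events
    tsdb_connection="v3io",  # time-series database for monitoring metrics
)
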
@@ -3469,29 +3547,33 @@ class MlrunProject(ModelObj):
 
     def list_model_endpoints(
         self,
-        model: Optional[str] = None,
-        function: Optional[str] = None,
+        name: Optional[str] = None,
+        model_name: Optional[str] = None,
+        function_name: Optional[str] = None,
         labels: Optional[list[str]] = None,
-        start: str = "now-1h",
-        end: str = "now",
+        start: Optional[datetime.datetime] = None,
+        end: Optional[datetime.datetime] = None,
         top_level: bool = False,
         uids: Optional[list[str]] = None,
-    ) -> list[mlrun.model_monitoring.model_endpoint.ModelEndpoint]:
+    ) -> mlrun.common.schemas.ModelEndpointList:
        """
        Returns a list of `ModelEndpoint` objects. Each `ModelEndpoint` object represents the current state of a
        model endpoint. This functions supports filtering by the following parameters:
-        1) model
-        2) function
-        3) labels
-        4) top level
-        5) uids
+        1) name
+        2) model_name
+        3) function_name
+        4) labels
+        5) top level
+        6) uids
+        7) start and end time, corresponding to the `created` field.
        By default, when no filters are applied, all available endpoints for the given project will be listed.
 
        In addition, this functions provides a facade for listing endpoint related metrics. This facade is time-based
        and depends on the 'start' and 'end' parameters.
 
-        :param model:         The name of the model to filter by
-        :param function:      The name of the function to filter by
+        :param name:          The name of the model to filter by
+        :param model_name:    The name of the model to filter by
+        :param function_name: The name of the function to filter by
        :param labels:        Filter model endpoints by label key-value pairs or key existence. This can be provided as:
                              - A dictionary in the format `{"label": "value"}` to match specific label key-value pairs,
                                or `{"label": None}` to check for key existence.
@@ -3499,12 +3581,8 @@ class MlrunProject(ModelObj):
                                or just `"label"` for key existence.
                              - A comma-separated string formatted as `"label1=value1,label2"` to match entities with
                                the specified key-value pairs or key existence.
-        :param start:         The start time of the metrics. Can be represented by a string containing an RFC 3339 time, a
-                              Unix timestamp in milliseconds, a relative time (`'now'` or `'now-[0-9]+[mhd]'`, where
-                              `m` = minutes, `h` = hours, `'d'` = days, and `'s'` = seconds), or 0 for the earliest time.
-        :param end:           The end time of the metrics. Can be represented by a string containing an RFC 3339 time, a
-                              Unix timestamp in milliseconds, a relative time (`'now'` or `'now-[0-9]+[mhd]'`, where
-                              `m` = minutes, `h` = hours, `'d'` = days, and `'s'` = seconds), or 0 for the earliest time.
+        :param start:         The start time to filter by.Corresponding to the `created` field.
+        :param end:           The end time to filter by. Corresponding to the `created` field.
        :param top_level:     if true will return only routers and endpoint that are NOT children of any router
        :param uids:          if passed will return a list `ModelEndpoint` object with uid in uids
 
@@ -3513,8 +3591,9 @@ class MlrunProject(ModelObj):
         db = mlrun.db.get_run_db(secrets=self._secrets)
         return db.list_model_endpoints(
             project=self.name,
-            model=model,
-            function=function,
+            name=name,
+            model_name=model_name,
+            function_name=function_name,
             labels=labels,
             start=start,
             end=end,
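
The hunks above rename the `list_model_endpoints` filters (`model`/`function` become `name`/`model_name`/`function_name`), switch `start`/`end` to datetimes filtering on the `created` field, and change the return type to `ModelEndpointList`. A minimal sketch of the updated call (the function name and time window are illustrative):

import datetime
import mlrun

project = mlrun.get_or_create_project("my-project")
now = datetime.datetime.now(datetime.timezone.utc)
endpoints = project.list_model_endpoints(
    function_name="serving",          # was: function="serving"
    start=now - datetime.timedelta(days=7),
    end=now,
    top_level=True,
)
# The result is a ModelEndpointList schema rather than a plain Python list.
for endpoint in endpoints.endpoints:
    print(endpoint.metadata.uid)
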
@@ -4496,6 +4575,25 @@ class MlrunProject(ModelObj):
             profile, self.name
         )
 
+    def get_config_profile_attributes(self, name: str) -> dict:
+        """
+        Get the merged attributes from a named configuration profile.
+
+        Retrieves a profile from the datastore using the provided name and returns its
+        merged public and private attributes as a dictionary.
+
+        Args:
+            name (str): Name of the configuration profile to retrieve. Will be prefixed
+                with "ds://" to form the full profile path.
+
+        Returns:
+            dict: The merged attributes dictionary containing both public and private
+                configuration settings from the profile. Returns nested dictionaries if
+                the profile contains nested configurations.
+        """
+        profile = datastore_profile_read(f"ds://{name}", self.name)
+        return profile.attributes()
+
     def delete_datastore_profile(self, profile: str):
         mlrun.db.get_run_db(secrets=self._secrets).delete_datastore_profile(
             profile, self.name
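
A minimal sketch of the new `get_config_profile_attributes` helper added above, assuming a datastore profile named `my-config` was registered beforehand (the profile name and keys are illustrative):

import mlrun

project = mlrun.get_or_create_project("my-project")
# Internally reads "ds://my-config" and merges its public and private attributes.
attributes = project.get_config_profile_attributes("my-config")
for key, value in attributes.items():
    print(key, value)
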
@@ -4840,7 +4938,6 @@ class MlrunProject(ModelObj):
             page=page,
             page_size=page_size,
             page_token=page_token,
-            return_all=False,
             **kwargs,
         )
 
mlrun/run.py CHANGED
@@ -909,6 +909,46 @@ def _run_pipeline(
     return pipeline_run_id
 
 
+def retry_pipeline(
+    run_id: str,
+    project: Optional[str] = None,
+    namespace: Optional[str] = None,
+) -> str:
+    """Retry a pipeline run.
+
+    This function retries a previously executed pipeline run using the specified run ID. If the run is not in a
+    retryable state, a new run is created as a clone of the original run.
+
+    :param run_id:    ID of the pipeline run to retry.
+    :param project:   Optional; name of the project associated with the pipeline run.
+    :param namespace: Optional; Kubernetes namespace to use if not the default.
+
+    :returns: ID of the retried pipeline run or the ID of a cloned run if the original run is not retryable.
+    :raises ValueError: If access to the remote API service is not available.
+    """
+    mldb = mlrun.db.get_run_db()
+    if mldb.kind != "http":
+        raise ValueError(
+            "Retrying a pipeline requires access to remote API service. "
+            "Please set the dbpath URL."
+        )
+
+    pipeline_run_id = mldb.retry_pipeline(
+        run_id=run_id,
+        project=project,
+        namespace=namespace,
+    )
+    if pipeline_run_id == run_id:
+        logger.info(
+            f"Retried pipeline run ID={pipeline_run_id}, check UI for progress."
+        )
+    else:
+        logger.info(
+            f"Copy of pipeline {run_id} was retried as run ID={pipeline_run_id}, check UI for progress."
+        )
+    return pipeline_run_id
+
+
 def wait_for_pipeline_completion(
     run_id,
     timeout=60 * 60,
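
A minimal sketch of the new module-level `retry_pipeline` helper, assuming the environment points at a remote MLRun API (`dbpath`); the run ID and project name are placeholders:

import mlrun

run_id = "<pipeline-run-id>"  # placeholder for an existing pipeline run ID
new_run_id = mlrun.run.retry_pipeline(run_id=run_id, project="my-project")
if new_run_id != run_id:
    # The original run was not in a retryable state, so a clone was started instead.
    print(f"cloned run started: {new_run_id}")
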
mlrun/runtimes/nuclio/function.py CHANGED
@@ -1192,9 +1192,6 @@ class RemoteRuntime(KubeResource):
         return results
 
     def _resolve_invocation_url(self, path, force_external_address):
-        if not path.startswith("/") and path != "":
-            path = f"/{path}"
-
         # internal / external invocation urls is a nuclio >= 1.6.x feature
         # try to infer the invocation url from the internal and if not exists, use external.
         # $$$$ we do not want to use the external invocation url (e.g.: ingress, nodePort, etc.)
@@ -1203,12 +1200,16 @@
             and self.status.internal_invocation_urls
             and mlrun.k8s_utils.is_running_inside_kubernetes_cluster()
         ):
-            return f"http://{self.status.internal_invocation_urls[0]}{path}"
+            return mlrun.utils.helpers.join_urls(
+                f"http://{self.status.internal_invocation_urls[0]}", path
+            )
 
         if self.status.external_invocation_urls:
-            return f"http://{self.status.external_invocation_urls[0]}{path}"
+            return mlrun.utils.helpers.join_urls(
+                f"http://{self.status.external_invocation_urls[0]}", path
+            )
         else:
-            return f"http://{self.status.address}{path}"
+            return mlrun.utils.helpers.join_urls(f"http://{self.status.address}", path)
 
     def _update_credentials_from_remote_build(self, remote_data):
         self.metadata.credentials = remote_data.get("metadata", {}).get(
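
The hunks above delegate invocation-URL building to `mlrun.utils.helpers.join_urls` instead of manually prefixing the path with a slash. The sketch below illustrates the slash-normalizing behavior this change relies on; it is an illustration of the intent, not mlrun's actual implementation:

def join_urls_sketch(base: str, path: str) -> str:
    # Join a base URL and a path without producing missing or doubled slashes.
    if not path:
        return base
    return base.rstrip("/") + "/" + path.lstrip("/")

assert join_urls_sketch("http://nuclio-fn:8080", "predict") == "http://nuclio-fn:8080/predict"
assert join_urls_sketch("http://nuclio-fn:8080/", "/predict") == "http://nuclio-fn:8080/predict"
assert join_urls_sketch("http://nuclio-fn:8080", "") == "http://nuclio-fn:8080"
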
mlrun/runtimes/nuclio/serving.py CHANGED
@@ -39,7 +39,7 @@ from mlrun.serving.states import (
 )
 from mlrun.utils import get_caller_globals, logger, set_paths
 
-from .function import NuclioSpec, RemoteRuntime
+from .function import NuclioSpec, RemoteRuntime, min_nuclio_versions
 serving_subkind = "serving_v2"
 
 
@@ -577,6 +577,7 @@ class ServingRuntime(RemoteRuntime):
         self.spec.secret_sources.append({"kind": kind, "source": source})
         return self
 
+    @min_nuclio_versions("1.12.10")
     def deploy(
         self,
         project="",
@@ -644,9 +645,12 @@ class ServingRuntime(RemoteRuntime):
 
     def _get_serving_spec(self):
         function_name_uri_map = {f.name: f.uri(self) for f in self.spec.function_refs}
-
         serving_spec = {
+            "function_name": self.metadata.name,
+            "function_tag": self.metadata.tag,
             "function_uri": self._function_uri(),
+            "function_hash": self.metadata.hash,
+            "project": self.metadata.project,
             "version": "v2",
             "parameters": self.spec.parameters,
             "graph": self.spec.graph.to_dict() if self.spec.graph else {},
@@ -707,6 +711,9 @@ class ServingRuntime(RemoteRuntime):
             function_uri=self._function_uri(),
             secret_sources=self.spec.secret_sources,
             default_content_type=self.spec.default_content_type,
+            function_name=self.metadata.name,
+            function_tag=self.metadata.tag,
+            project=self.metadata.project,
             **kwargs,
         )
         server.init_states(