mlrun 1.7.0rc6__py3-none-any.whl → 1.7.0rc8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (70)
  1. mlrun/__main__.py +2 -0
  2. mlrun/common/constants.py +6 -0
  3. mlrun/common/schemas/__init__.py +3 -0
  4. mlrun/common/schemas/api_gateway.py +8 -1
  5. mlrun/common/schemas/model_monitoring/__init__.py +4 -0
  6. mlrun/common/schemas/model_monitoring/constants.py +35 -18
  7. mlrun/common/schemas/project.py +1 -0
  8. mlrun/common/types.py +7 -1
  9. mlrun/config.py +34 -10
  10. mlrun/data_types/data_types.py +4 -0
  11. mlrun/datastore/alibaba_oss.py +130 -0
  12. mlrun/datastore/azure_blob.py +4 -5
  13. mlrun/datastore/base.py +22 -16
  14. mlrun/datastore/datastore.py +4 -0
  15. mlrun/datastore/datastore_profile.py +7 -0
  16. mlrun/datastore/google_cloud_storage.py +1 -1
  17. mlrun/datastore/sources.py +2 -3
  18. mlrun/datastore/targets.py +6 -1
  19. mlrun/db/base.py +14 -6
  20. mlrun/db/httpdb.py +61 -56
  21. mlrun/db/nopdb.py +3 -0
  22. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +6 -1
  23. mlrun/frameworks/tf_keras/mlrun_interface.py +20 -8
  24. mlrun/kfpops.py +2 -5
  25. mlrun/model.py +1 -0
  26. mlrun/model_monitoring/__init__.py +1 -1
  27. mlrun/model_monitoring/api.py +104 -295
  28. mlrun/model_monitoring/controller.py +25 -25
  29. mlrun/model_monitoring/db/__init__.py +16 -0
  30. mlrun/model_monitoring/{stores → db/stores}/__init__.py +43 -34
  31. mlrun/model_monitoring/db/stores/base/__init__.py +15 -0
  32. mlrun/model_monitoring/{stores/model_endpoint_store.py → db/stores/base/store.py} +47 -6
  33. mlrun/model_monitoring/db/stores/sqldb/__init__.py +13 -0
  34. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +49 -0
  35. mlrun/model_monitoring/{stores → db/stores/sqldb}/models/base.py +76 -3
  36. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +68 -0
  37. mlrun/model_monitoring/{stores → db/stores/sqldb}/models/sqlite.py +13 -1
  38. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +662 -0
  39. mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +13 -0
  40. mlrun/model_monitoring/{stores/kv_model_endpoint_store.py → db/stores/v3io_kv/kv_store.py} +134 -3
  41. mlrun/model_monitoring/helpers.py +3 -3
  42. mlrun/model_monitoring/stream_processing.py +41 -9
  43. mlrun/model_monitoring/tracking_policy.py +7 -1
  44. mlrun/model_monitoring/writer.py +4 -36
  45. mlrun/projects/pipelines.py +14 -2
  46. mlrun/projects/project.py +118 -103
  47. mlrun/run.py +5 -1
  48. mlrun/runtimes/base.py +6 -0
  49. mlrun/runtimes/nuclio/api_gateway.py +218 -65
  50. mlrun/runtimes/nuclio/function.py +3 -0
  51. mlrun/runtimes/nuclio/serving.py +28 -32
  52. mlrun/runtimes/pod.py +26 -0
  53. mlrun/serving/routers.py +4 -3
  54. mlrun/serving/server.py +4 -6
  55. mlrun/serving/states.py +34 -14
  56. mlrun/serving/v2_serving.py +4 -3
  57. mlrun/utils/helpers.py +34 -0
  58. mlrun/utils/http.py +1 -1
  59. mlrun/utils/retryer.py +1 -0
  60. mlrun/utils/version/version.json +2 -2
  61. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc8.dist-info}/METADATA +25 -16
  62. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc8.dist-info}/RECORD +66 -62
  63. mlrun/model_monitoring/batch.py +0 -933
  64. mlrun/model_monitoring/stores/models/__init__.py +0 -27
  65. mlrun/model_monitoring/stores/models/mysql.py +0 -34
  66. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +0 -382
  67. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc8.dist-info}/LICENSE +0 -0
  68. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc8.dist-info}/WHEEL +0 -0
  69. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc8.dist-info}/entry_points.txt +0 -0
  70. {mlrun-1.7.0rc6.dist-info → mlrun-1.7.0rc8.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -11,6 +11,7 @@
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  # See the License for the specific language governing permissions and
  # limitations under the License.
+
  import datetime
  import getpass
  import glob
@@ -44,6 +45,7 @@ import mlrun.runtimes
  import mlrun.runtimes.nuclio.api_gateway
  import mlrun.runtimes.pod
  import mlrun.runtimes.utils
+ import mlrun.serving
  import mlrun.utils.regex
  from mlrun.datastore.datastore_profile import DatastoreProfile, DatastoreProfile2Json
  from mlrun.runtimes.nuclio.function import RemoteRuntime
@@ -55,7 +57,6 @@ from ..features import Feature
  from ..model import EntrypointParam, ImageBuilder, ModelObj
  from ..model_monitoring.application import (
  ModelMonitoringApplicationBase,
- PushToMonitoringWriter,
  )
  from ..run import code_to_function, get_object, import_function, new_function
  from ..secrets import SecretsStore
@@ -128,6 +129,7 @@ def new_project(
  save: bool = True,
  overwrite: bool = False,
  parameters: dict = None,
+ default_function_node_selector: dict = None,
  ) -> "MlrunProject":
  """Create a new MLRun project, optionally load it from a yaml/zip/git template

@@ -181,6 +183,7 @@ def new_project(
  :param overwrite: overwrite project using 'cascade' deletion strategy (deletes project resources)
  if project with name exists
  :param parameters: key/value pairs to add to the project.spec.params
+ :param default_function_node_selector: defines the default node selector for scheduling functions within the project

  :returns: project object
  """
@@ -227,6 +230,11 @@ def new_project(
  project.spec.origin_url = url
  if description:
  project.spec.description = description
+
+ if default_function_node_selector:
+ for key, val in default_function_node_selector.items():
+ project.spec.default_function_node_selector[key] = val
+
  if parameters:
  # Enable setting project parameters at load time, can be used to customize the project_setup
  for key, val in parameters.items():
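new_project now copies the new default_function_node_selector argument onto project.spec.default_function_node_selector (see the hunk above). A minimal usage sketch; the project name and selector values are hypothetical:

```python
import mlrun

# Hypothetical project name and node selector, illustrating the new argument.
project = mlrun.new_project(
    "node-selector-demo",
    default_function_node_selector={"disktype": "ssd"},
)
print(project.spec.default_function_node_selector)  # {'disktype': 'ssd'}
```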
@@ -760,6 +768,7 @@ class ProjectSpec(ModelObj):
  default_image=None,
  build=None,
  custom_packagers: list[tuple[str, bool]] = None,
+ default_function_node_selector=None,
  ):
  self.repo = None

@@ -799,6 +808,7 @@ class ProjectSpec(ModelObj):
  # in a tuple where the first index is the packager module's path (str) and the second is a flag (bool) for
  # whether it is mandatory for a run (raise exception on collection error) or not.
  self.custom_packagers = custom_packagers or []
+ self.default_function_node_selector = default_function_node_selector or {}

  @property
  def source(self) -> str:
@@ -1844,10 +1854,10 @@ class MlrunProject(ModelObj):
  monitoring application's constructor.
  """

- if name in mm_constants.MonitoringFunctionNames.all():
+ if name in mm_constants.MonitoringFunctionNames.list():
  raise mlrun.errors.MLRunInvalidArgumentError(
- f"Application name can not be on of the following name : "
- f"{mm_constants.MonitoringFunctionNames.all()}"
+ f"An application cannot have the following names: "
+ f"{mm_constants.MonitoringFunctionNames.list()}"
  )
  function_object: RemoteRuntime = None
  (
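The all() to list() rename suggests the reserved monitoring function names are now exposed through a list() helper on the constants enum. An illustrative sketch of that pattern only; this is not MLRun's actual implementation and the member values are assumptions:

```python
import enum


class MonitoringFunctionNamesSketch(str, enum.Enum):
    # Illustrative stand-in for mm_constants.MonitoringFunctionNames.
    STREAM = "model-monitoring-stream"
    APPLICATION_CONTROLLER = "model-monitoring-controller"
    WRITER = "model-monitoring-writer"

    @classmethod
    def list(cls) -> list[str]:
        # Mirrors the list() call used in the validation above: all member values.
        return [member.value for member in cls]


print(MonitoringFunctionNamesSketch.list())
```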
@@ -1866,16 +1876,6 @@ class MlrunProject(ModelObj):
  requirements_file,
  **application_kwargs,
  )
- models_names = "all"
- function_object.set_label(
- mm_constants.ModelMonitoringAppLabel.KEY,
- mm_constants.ModelMonitoringAppLabel.VAL,
- )
- function_object.set_label("models", models_names)
-
- if not mlrun.mlconf.is_ce_mode():
- function_object.apply(mlrun.mount_v3io())
-
  # save to project spec
  self.spec.set_function(resolved_function_name, function_object, func)

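With the label and v3io-mount enrichment dropped from this code path, registering a monitoring application from user code stays a short flow. A hedged sketch; the project, file, and class names are hypothetical:

```python
import mlrun

project = mlrun.get_or_create_project("monitoring-demo")  # hypothetical project
fn = project.set_model_monitoring_function(
    func="apps/my_monitoring_app.py",     # hypothetical application file
    application_class="MyMonitoringApp",  # hypothetical application class
    name="my-monitoring-app",
    image="mlrun/mlrun",
)
fn.deploy()
```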
@@ -1934,49 +1934,38 @@ class MlrunProject(ModelObj):

  def _instantiate_model_monitoring_function(
  self,
- func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
- application_class: typing.Union[str, ModelMonitoringApplicationBase] = None,
- name: str = None,
- image: str = None,
- handler: str = None,
- with_repo: bool = None,
- tag: str = None,
- requirements: typing.Union[str, list[str]] = None,
+ func: typing.Union[str, mlrun.runtimes.BaseRuntime, None] = None,
+ application_class: typing.Union[
+ str, ModelMonitoringApplicationBase, None
+ ] = None,
+ name: typing.Optional[str] = None,
+ image: typing.Optional[str] = None,
+ handler: typing.Optional[str] = None,
+ with_repo: typing.Optional[bool] = None,
+ tag: typing.Optional[str] = None,
+ requirements: typing.Union[str, list[str], None] = None,
  requirements_file: str = "",
  **application_kwargs,
  ) -> tuple[str, mlrun.runtimes.BaseRuntime, dict]:
+ import mlrun.model_monitoring.api
+
  function_object: RemoteRuntime = None
  kind = None
  if (isinstance(func, str) or func is None) and application_class is not None:
- kind = "serving"
- if func is None:
- func = ""
- func = mlrun.code_to_function(
- filename=func,
+ kind = mlrun.run.RuntimeKinds.serving
+ func = mlrun.model_monitoring.api._create_model_monitoring_function_base(
+ project=self.name,
+ func=func,
+ application_class=application_class,
  name=name,
- project=self.metadata.name,
- tag=tag,
- kind=kind,
  image=image,
+ tag=tag,
  requirements=requirements,
  requirements_file=requirements_file,
+ **application_kwargs,
  )
- graph = func.set_topology("flow")
- if isinstance(application_class, str):
- first_step = graph.to(
- class_name=application_class, **application_kwargs
- )
- else:
- first_step = graph.to(class_name=application_class)
- first_step.to(
- class_name=PushToMonitoringWriter(
- project=self.metadata.name,
- writer_application_name=mm_constants.MonitoringFunctionNames.WRITER,
- stream_uri=None,
- ),
- ).respond()
  elif isinstance(func, str) and isinstance(handler, str):
- kind = "nuclio"
+ kind = mlrun.run.RuntimeKinds.nuclio

  (
  resolved_function_name,
@@ -1994,12 +1983,10 @@ class MlrunProject(ModelObj):
  requirements,
  requirements_file,
  )
- models_names = "all"
  function_object.set_label(
  mm_constants.ModelMonitoringAppLabel.KEY,
  mm_constants.ModelMonitoringAppLabel.VAL,
  )
- function_object.set_label("models", models_names)

  if not mlrun.mlconf.is_ce_mode():
  function_object.apply(mlrun.mount_v3io())
@@ -2029,8 +2016,6 @@ class MlrunProject(ModelObj):
  stream & histogram data drift functions, which are real time nuclio
  functions. By default, the image is mlrun/mlrun.
  :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
-
- :returns: model monitoring controller job as a dictionary.
  """
  if default_controller_image != "mlrun/mlrun":
  # TODO: Remove this in 1.9.0
@@ -2045,18 +2030,24 @@ class MlrunProject(ModelObj):
  project=self.name,
  image=image,
  base_period=base_period,
+ deploy_histogram_data_drift_app=deploy_histogram_data_drift_app,
  )
- if deploy_histogram_data_drift_app:
- fn = self.set_model_monitoring_function(
- func=str(
- pathlib.Path(__file__).parent.parent
- / "model_monitoring/applications/histogram_data_drift.py"
- ),
- name=mm_constants.MLRUN_HISTOGRAM_DATA_DRIFT_APP_NAME,
- application_class="HistogramDataDriftApplication",
- image=image,
- )
- fn.deploy()
+
+ def deploy_histogram_data_drift_app(
+ self,
+ *,
+ image: str = "mlrun/mlrun",
+ db: Optional[mlrun.db.RunDBInterface] = None,
+ ) -> None:
+ """
+ Deploy the histogram data drift application.
+
+ :param image: The image on which the application will run.
+ :param db: An optional DB object.
+ """
+ if db is None:
+ db = mlrun.db.get_run_db(secrets=self._secrets)
+ db.deploy_histogram_data_drift_app(project=self.name, image=image)

  def update_model_monitoring_controller(
  self,
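Deploying the histogram data drift application is now a dedicated project method that delegates to the run DB instead of being built inline in enable_model_monitoring. A minimal usage sketch, assuming an existing MlrunProject object named project:

```python
# Deploy the default histogram data drift application for this project.
project.deploy_histogram_data_drift_app(image="mlrun/mlrun")
```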
@@ -2081,26 +2072,28 @@ class MlrunProject(ModelObj):
  image=image,
  )

- def disable_model_monitoring(self):
+ def disable_model_monitoring(
+ self, *, delete_histogram_data_drift_app: bool = True
+ ) -> None:
+ """
+ Note: This method is currently not advised for use. See ML-3432.
+ Disable model monitoring by deleting the underlying functions infrastructure from MLRun database.
+
+ :param delete_histogram_data_drift_app: Whether to delete the histogram data drift app.
+ """
  db = mlrun.db.get_run_db(secrets=self._secrets)
- db.delete_function(
- project=self.name,
- name=mm_constants.MonitoringFunctionNames.APPLICATION_CONTROLLER,
- )
- db.delete_function(
- project=self.name,
- name=mm_constants.MonitoringFunctionNames.WRITER,
- )
- db.delete_function(
- project=self.name,
- name=mm_constants.MonitoringFunctionNames.STREAM,
- )
+ for fn_name in mm_constants.MonitoringFunctionNames.list():
+ db.delete_function(project=self.name, name=fn_name)
+ if delete_histogram_data_drift_app:
+ db.delete_function(
+ project=self.name, name=mm_constants.MLRUN_HISTOGRAM_DATA_DRIFT_APP_NAME
+ )

  def set_function(
  self,
  func: typing.Union[str, mlrun.runtimes.BaseRuntime] = None,
  name: str = "",
- kind: str = "",
+ kind: str = "job",
  image: str = None,
  handler: str = None,
  with_repo: bool = None,
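disable_model_monitoring now iterates over MonitoringFunctionNames.list() and, by default, also deletes the histogram data drift application; set_function additionally defaults kind to "job". A usage sketch, assuming an existing MlrunProject object named project:

```python
# Remove the monitoring infrastructure functions but keep the histogram
# data drift application deployed.
project.disable_model_monitoring(delete_histogram_data_drift_app=False)
```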
@@ -2747,40 +2740,41 @@ class MlrunProject(ModelObj):
  cleanup_ttl: int = None,
  notifications: list[mlrun.model.Notification] = None,
  ) -> _PipelineRunStatus:
- """run a workflow using kubeflow pipelines
-
- :param name: name of the workflow
- :param workflow_path:
- url to a workflow file, if not a project workflow
- :param arguments:
- kubeflow pipelines arguments (parameters)
- :param artifact_path:
- target path/url for workflow artifacts, the string
- '{{workflow.uid}}' will be replaced by workflow id
- :param workflow_handler:
- workflow function handler (for running workflow function directly)
- :param namespace: kubernetes namespace if other than default
- :param sync: force functions sync before run
- :param watch: wait for pipeline completion
- :param dirty: allow running the workflow when the git repo is dirty
- :param engine: workflow engine running the workflow.
- supported values are 'kfp' (default), 'local' or 'remote'.
- for setting engine for remote running use 'remote:local' or 'remote:kfp'.
- :param local: run local pipeline with local functions (set local=True in function.run())
+ """Run a workflow using kubeflow pipelines
+
+ :param name: Name of the workflow
+ :param workflow_path: URL to a workflow file, if not a project workflow
+ :param arguments: Kubeflow pipelines arguments (parameters)
+ :param artifact_path: Target path/URL for workflow artifacts, the string '{{workflow.uid}}' will be
+ replaced by workflow id.
+ :param workflow_handler: Workflow function handler (for running workflow function directly)
+ :param namespace: Kubernetes namespace if other than default
+ :param sync: Force functions sync before run
+ :param watch: Wait for pipeline completion
+ :param dirty: Allow running the workflow when the git repo is dirty
+ :param engine: Workflow engine running the workflow.
+ Supported values are 'kfp' (default), 'local' or 'remote'.
+ For setting engine for remote running use 'remote:local' or 'remote:kfp'.
+ :param local: Run local pipeline with local functions (set local=True in function.run())
  :param schedule: ScheduleCronTrigger class instance or a standard crontab expression string
  (which will be converted to the class using its `from_crontab` constructor),
  see this link for help:
  https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
  for using the pre-defined workflow's schedule, set `schedule=True`
- :param timeout: timeout in seconds to wait for pipeline completion (watch will be activated)
- :param source: remote source to use instead of the actual `project.spec.source` (used when engine is remote).
- for other engines the source is to validate that the code is up-to-date
+ :param timeout: Timeout in seconds to wait for pipeline completion (watch will be activated)
+ :param source: Source to use instead of the actual `project.spec.source` (used when engine is remote).
+ Can be a one of:
+ 1. Remote URL which is loaded dynamically to the workflow runner.
+ 2. A path to the project's context on the workflow runner's image.
+ Path can be absolute or relative to `project.spec.build.source_code_target_dir` if defined
+ (enriched when building a project image with source, see `MlrunProject.build_image`).
+ For other engines the source is used to validate that the code is up-to-date.
  :param cleanup_ttl:
- pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
- workflow and all its resources are deleted)
+ Pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
+ Workflow and all its resources are deleted)
  :param notifications:
- list of notifications to send for workflow completion
- :returns: run id
+ List of notifications to send for workflow completion
+ :returns: Run id
  """

  arguments = arguments or {}
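The updated run() docstring documents that, with the remote engine, source can now be a path into the project context on the runner image (absolute, or relative to project.spec.build.source_code_target_dir) instead of only a remote URL. A hedged sketch; the workflow name and arguments are hypothetical:

```python
# Run the project's "main" workflow remotely, using the source already baked
# into the workflow runner's image rather than pulling a remote URL.
run_status = project.run(
    "main",
    engine="remote",
    source="./",  # relative to project.spec.build.source_code_target_dir
    arguments={"label_column": "target"},
)
```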
@@ -3179,6 +3173,7 @@ class MlrunProject(ModelObj):
  requirements_file: str = None,
  builder_env: dict = None,
  extra_args: str = None,
+ source_code_target_dir: str = None,
  ):
  """specify builder configuration for the project

@@ -3199,6 +3194,8 @@ class MlrunProject(ModelObj):
  e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
  e.g. extra_args="--skip-tls-verify --build-arg A=val"
+ :param source_code_target_dir: Path on the image where source code would be extracted
+ (by default `/home/mlrun_code`)
  """
  if not overwrite_build_params:
  # TODO: change overwrite_build_params default to True in 1.8.0
@@ -3222,6 +3219,7 @@ class MlrunProject(ModelObj):
  overwrite=overwrite_build_params,
  builder_env=builder_env,
  extra_args=extra_args,
+ source_code_target_dir=source_code_target_dir,
  )

  if set_as_default and image != self.default_image:
@@ -3268,7 +3266,7 @@ class MlrunProject(ModelObj):
  * False: The new params are merged with the existing
  * True: The existing params are replaced by the new ones
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
- e.g. extra_args="--skip-tls-verify --build-arg A=val"r
+ e.g. extra_args="--skip-tls-verify --build-arg A=val"
  :param target_dir: Path on the image where source code would be extracted (by default `/home/mlrun_code`)
  """
  if not base_image:
@@ -3336,6 +3334,11 @@ class MlrunProject(ModelObj):
  force_build=True,
  )

+ # Get the enriched target dir from the function
+ self.spec.build.source_code_target_dir = (
+ function.spec.build.source_code_target_dir
+ )
+
  try:
  mlrun.db.get_run_db(secrets=self._secrets).delete_function(
  name=function.metadata.name
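After a successful build_image call, the enriched target directory is copied back onto the project spec, which is what lets later remote workflow runs resolve relative source paths. A hedged sketch; the image name and directory are hypothetical:

```python
# Build a project image with the source baked in, then inspect the enriched spec.
project.build_image(
    image=".my-project-image",      # hypothetical image name
    base_image="mlrun/mlrun",
    target_dir="/home/mlrun_code",  # where the source is extracted inside the image
)
print(project.spec.build.source_code_target_dir)
```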
@@ -3344,7 +3347,7 @@ class MlrunProject(ModelObj):
  logger.warning(
  f"Image was successfully built, but failed to delete temporary function {function.metadata.name}."
  " To remove the function, attempt to manually delete it.",
- exc=repr(exc),
+ exc=mlrun.errors.err_to_str(exc),
  )

  return result
@@ -3749,6 +3752,18 @@ class MlrunProject(ModelObj):

  return mlrun.db.get_run_db().get_api_gateway(name=name, project=self.name)

+ def delete_api_gateway(
+ self,
+ name: str,
+ ):
+ """
+ Deletes an API gateway by name.
+
+ :param name: The name of the API gateway to delete.
+ """
+
+ mlrun.db.get_run_db().delete_api_gateway(name=name, project=self.name)
+
  def _run_authenticated_git_action(
  self,
  action: Callable,
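A usage sketch for the new project-level API gateway deletion; the gateway name is hypothetical:

```python
# Delete an API gateway that belongs to this project.
project.delete_api_gateway("my-api-gateway")
```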
mlrun/run.py CHANGED
@@ -389,6 +389,8 @@ def import_function_to_dict(url, secrets=None):
  code = get_in(runtime, "spec.build.functionSourceCode")
  update_in(runtime, "metadata.build.code_origin", url)
  cmd = code_file = get_in(runtime, "spec.command", "")
+ # use kind = "job" by default if not specified
+ runtime.setdefault("kind", "job")
  if " " in cmd:
  code_file = cmd[: cmd.find(" ")]
  if runtime["kind"] in ["", "local"]:
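The new setdefault call means an imported function dictionary without a kind is treated as a job runtime, while an explicit kind is left untouched. A minimal illustration of that behavior (the dictionary contents are hypothetical):

```python
# No kind in the imported spec: defaults to "job".
runtime = {"metadata": {"name": "my-func"}}
runtime.setdefault("kind", "job")
assert runtime["kind"] == "job"

# An explicit kind is preserved.
runtime = {"kind": "serving"}
runtime.setdefault("kind", "job")
assert runtime["kind"] == "serving"
```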
@@ -847,6 +849,7 @@ def _run_pipeline(
  ops=None,
  url=None,
  cleanup_ttl=None,
+ timeout=60,
  ):
  """remote KubeFlow pipeline execution

@@ -884,6 +887,7 @@
  ops=ops,
  artifact_path=artifact_path,
  cleanup_ttl=cleanup_ttl,
+ timeout=timeout,
  )
  logger.info(f"Pipeline run id={pipeline_run_id}, check UI for progress")
  return pipeline_run_id
@@ -961,7 +965,7 @@
  show_kfp_run(resp)

  status = resp["run"]["status"] if resp else "unknown"
- message = resp["run"].get("message", "")
+ message = resp["run"].get("message", "") if resp else ""
  if expected_statuses:
  if status not in expected_statuses:
  raise RuntimeError(
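The fix guards against an empty pipeline response: the message lookup previously dereferenced resp["run"] even when resp was falsy. A minimal illustration of the guarded expressions:

```python
# With an empty response, both status and message fall back safely.
resp = None
status = resp["run"]["status"] if resp else "unknown"
message = resp["run"].get("message", "") if resp else ""
assert (status, message) == ("unknown", "")
```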
mlrun/runtimes/base.py CHANGED
@@ -840,6 +840,12 @@ class BaseRuntime(ModelObj):
  or (build.source and not build.load_source_on_run)
  )

+ def enrich_runtime_spec(
+ self,
+ project_node_selector: dict[str, str],
+ ):
+ pass
+
  def prepare_image_for_deploy(self):
  """
  if a function has a 'spec.image' it is considered to be deployed,
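enrich_runtime_spec is a new no-op hook on BaseRuntime that concrete runtimes can override to apply the project-level default_function_node_selector (see the pod.py and project.py entries in the file list). An illustrative override, not MLRun's actual implementation; the merge policy shown is an assumption:

```python
import mlrun.runtimes


class NodeSelectorAwareRuntime(mlrun.runtimes.KubejobRuntime):
    # Hypothetical subclass showing how the hook could be used.
    def enrich_runtime_spec(self, project_node_selector: dict[str, str]):
        # Assumed policy: selectors set on the function win over the
        # project-level default_function_node_selector.
        self.spec.node_selector = {
            **project_node_selector,
            **(self.spec.node_selector or {}),
        }
```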