mlrun 1.4.0rc25__py3-none-any.whl → 1.5.0rc2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic; see the registry page for details.

Files changed (184)
  1. mlrun/__init__.py +2 -35
  2. mlrun/__main__.py +3 -41
  3. mlrun/api/api/api.py +6 -0
  4. mlrun/api/api/endpoints/feature_store.py +0 -4
  5. mlrun/api/api/endpoints/files.py +14 -2
  6. mlrun/api/api/endpoints/frontend_spec.py +2 -1
  7. mlrun/api/api/endpoints/functions.py +95 -59
  8. mlrun/api/api/endpoints/grafana_proxy.py +9 -9
  9. mlrun/api/api/endpoints/logs.py +17 -3
  10. mlrun/api/api/endpoints/model_endpoints.py +3 -2
  11. mlrun/api/api/endpoints/pipelines.py +1 -5
  12. mlrun/api/api/endpoints/projects.py +88 -0
  13. mlrun/api/api/endpoints/runs.py +48 -6
  14. mlrun/api/api/endpoints/submit.py +2 -1
  15. mlrun/api/api/endpoints/workflows.py +355 -0
  16. mlrun/api/api/utils.py +3 -4
  17. mlrun/api/crud/__init__.py +1 -0
  18. mlrun/api/crud/client_spec.py +6 -2
  19. mlrun/api/crud/feature_store.py +5 -0
  20. mlrun/api/crud/model_monitoring/__init__.py +1 -0
  21. mlrun/api/crud/model_monitoring/deployment.py +497 -0
  22. mlrun/api/crud/model_monitoring/grafana.py +96 -42
  23. mlrun/api/crud/model_monitoring/helpers.py +159 -0
  24. mlrun/api/crud/model_monitoring/model_endpoints.py +202 -476
  25. mlrun/api/crud/notifications.py +9 -4
  26. mlrun/api/crud/pipelines.py +6 -11
  27. mlrun/api/crud/projects.py +2 -2
  28. mlrun/api/crud/runtime_resources.py +4 -3
  29. mlrun/api/crud/runtimes/nuclio/helpers.py +5 -1
  30. mlrun/api/crud/secrets.py +21 -0
  31. mlrun/api/crud/workflows.py +352 -0
  32. mlrun/api/db/base.py +16 -1
  33. mlrun/api/db/init_db.py +2 -4
  34. mlrun/api/db/session.py +1 -1
  35. mlrun/api/db/sqldb/db.py +129 -31
  36. mlrun/api/db/sqldb/models/models_mysql.py +15 -1
  37. mlrun/api/db/sqldb/models/models_sqlite.py +16 -2
  38. mlrun/api/launcher.py +38 -6
  39. mlrun/api/main.py +3 -2
  40. mlrun/api/rundb/__init__.py +13 -0
  41. mlrun/{db → api/rundb}/sqldb.py +36 -84
  42. mlrun/api/runtime_handlers/__init__.py +56 -0
  43. mlrun/api/runtime_handlers/base.py +1247 -0
  44. mlrun/api/runtime_handlers/daskjob.py +209 -0
  45. mlrun/api/runtime_handlers/kubejob.py +37 -0
  46. mlrun/api/runtime_handlers/mpijob.py +147 -0
  47. mlrun/api/runtime_handlers/remotesparkjob.py +29 -0
  48. mlrun/api/runtime_handlers/sparkjob.py +148 -0
  49. mlrun/api/schemas/__init__.py +17 -6
  50. mlrun/api/utils/builder.py +1 -4
  51. mlrun/api/utils/clients/chief.py +14 -0
  52. mlrun/api/utils/clients/iguazio.py +33 -33
  53. mlrun/api/utils/clients/nuclio.py +2 -2
  54. mlrun/api/utils/periodic.py +9 -2
  55. mlrun/api/utils/projects/follower.py +14 -7
  56. mlrun/api/utils/projects/leader.py +2 -1
  57. mlrun/api/utils/projects/remotes/nop_follower.py +2 -2
  58. mlrun/api/utils/projects/remotes/nop_leader.py +2 -2
  59. mlrun/api/utils/runtimes/__init__.py +14 -0
  60. mlrun/api/utils/runtimes/nuclio.py +43 -0
  61. mlrun/api/utils/scheduler.py +98 -15
  62. mlrun/api/utils/singletons/db.py +5 -1
  63. mlrun/api/utils/singletons/project_member.py +4 -1
  64. mlrun/api/utils/singletons/scheduler.py +1 -1
  65. mlrun/artifacts/base.py +6 -6
  66. mlrun/artifacts/dataset.py +4 -4
  67. mlrun/artifacts/manager.py +2 -3
  68. mlrun/artifacts/model.py +2 -2
  69. mlrun/artifacts/plots.py +8 -8
  70. mlrun/common/db/__init__.py +14 -0
  71. mlrun/common/helpers.py +37 -0
  72. mlrun/{mlutils → common/model_monitoring}/__init__.py +3 -2
  73. mlrun/common/model_monitoring/helpers.py +69 -0
  74. mlrun/common/schemas/__init__.py +13 -1
  75. mlrun/common/schemas/auth.py +4 -1
  76. mlrun/common/schemas/client_spec.py +1 -1
  77. mlrun/common/schemas/function.py +17 -0
  78. mlrun/common/schemas/model_monitoring/__init__.py +48 -0
  79. mlrun/common/{model_monitoring.py → schemas/model_monitoring/constants.py} +11 -23
  80. mlrun/common/schemas/model_monitoring/grafana.py +55 -0
  81. mlrun/common/schemas/{model_endpoints.py → model_monitoring/model_endpoints.py} +32 -65
  82. mlrun/common/schemas/notification.py +1 -0
  83. mlrun/common/schemas/object.py +4 -0
  84. mlrun/common/schemas/project.py +1 -0
  85. mlrun/common/schemas/regex.py +1 -1
  86. mlrun/common/schemas/runs.py +1 -8
  87. mlrun/common/schemas/schedule.py +1 -8
  88. mlrun/common/schemas/workflow.py +54 -0
  89. mlrun/config.py +45 -42
  90. mlrun/datastore/__init__.py +21 -0
  91. mlrun/datastore/base.py +1 -1
  92. mlrun/datastore/datastore.py +9 -0
  93. mlrun/datastore/dbfs_store.py +168 -0
  94. mlrun/datastore/helpers.py +18 -0
  95. mlrun/datastore/sources.py +1 -0
  96. mlrun/datastore/store_resources.py +2 -5
  97. mlrun/datastore/v3io.py +1 -2
  98. mlrun/db/__init__.py +4 -68
  99. mlrun/db/base.py +12 -0
  100. mlrun/db/factory.py +65 -0
  101. mlrun/db/httpdb.py +175 -20
  102. mlrun/db/nopdb.py +4 -2
  103. mlrun/execution.py +4 -2
  104. mlrun/feature_store/__init__.py +1 -0
  105. mlrun/feature_store/api.py +1 -2
  106. mlrun/feature_store/common.py +2 -1
  107. mlrun/feature_store/feature_set.py +1 -11
  108. mlrun/feature_store/feature_vector.py +340 -2
  109. mlrun/feature_store/ingestion.py +5 -10
  110. mlrun/feature_store/retrieval/base.py +118 -104
  111. mlrun/feature_store/retrieval/dask_merger.py +17 -10
  112. mlrun/feature_store/retrieval/job.py +4 -1
  113. mlrun/feature_store/retrieval/local_merger.py +18 -18
  114. mlrun/feature_store/retrieval/spark_merger.py +21 -14
  115. mlrun/feature_store/retrieval/storey_merger.py +22 -16
  116. mlrun/kfpops.py +3 -9
  117. mlrun/launcher/base.py +57 -53
  118. mlrun/launcher/client.py +5 -4
  119. mlrun/launcher/factory.py +24 -13
  120. mlrun/launcher/local.py +6 -6
  121. mlrun/launcher/remote.py +4 -4
  122. mlrun/lists.py +0 -11
  123. mlrun/model.py +11 -17
  124. mlrun/model_monitoring/__init__.py +2 -22
  125. mlrun/model_monitoring/features_drift_table.py +1 -1
  126. mlrun/model_monitoring/helpers.py +22 -210
  127. mlrun/model_monitoring/model_endpoint.py +1 -1
  128. mlrun/model_monitoring/model_monitoring_batch.py +127 -50
  129. mlrun/model_monitoring/prometheus.py +219 -0
  130. mlrun/model_monitoring/stores/__init__.py +16 -11
  131. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +95 -23
  132. mlrun/model_monitoring/stores/models/mysql.py +47 -29
  133. mlrun/model_monitoring/stores/models/sqlite.py +47 -29
  134. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +31 -19
  135. mlrun/model_monitoring/{stream_processing_fs.py → stream_processing.py} +206 -64
  136. mlrun/model_monitoring/tracking_policy.py +104 -0
  137. mlrun/package/packager.py +6 -8
  138. mlrun/package/packagers/default_packager.py +121 -10
  139. mlrun/package/packagers/numpy_packagers.py +1 -1
  140. mlrun/platforms/__init__.py +0 -2
  141. mlrun/platforms/iguazio.py +0 -56
  142. mlrun/projects/pipelines.py +53 -159
  143. mlrun/projects/project.py +10 -37
  144. mlrun/render.py +1 -1
  145. mlrun/run.py +8 -124
  146. mlrun/runtimes/__init__.py +6 -42
  147. mlrun/runtimes/base.py +29 -1249
  148. mlrun/runtimes/daskjob.py +2 -198
  149. mlrun/runtimes/funcdoc.py +0 -9
  150. mlrun/runtimes/function.py +25 -29
  151. mlrun/runtimes/kubejob.py +5 -29
  152. mlrun/runtimes/local.py +1 -1
  153. mlrun/runtimes/mpijob/__init__.py +2 -2
  154. mlrun/runtimes/mpijob/abstract.py +10 -1
  155. mlrun/runtimes/mpijob/v1.py +0 -76
  156. mlrun/runtimes/mpijob/v1alpha1.py +1 -74
  157. mlrun/runtimes/nuclio.py +3 -2
  158. mlrun/runtimes/pod.py +28 -18
  159. mlrun/runtimes/remotesparkjob.py +1 -15
  160. mlrun/runtimes/serving.py +14 -6
  161. mlrun/runtimes/sparkjob/__init__.py +0 -1
  162. mlrun/runtimes/sparkjob/abstract.py +4 -131
  163. mlrun/runtimes/utils.py +0 -26
  164. mlrun/serving/routers.py +7 -7
  165. mlrun/serving/server.py +11 -8
  166. mlrun/serving/states.py +7 -1
  167. mlrun/serving/v2_serving.py +6 -6
  168. mlrun/utils/helpers.py +23 -42
  169. mlrun/utils/notifications/notification/__init__.py +4 -0
  170. mlrun/utils/notifications/notification/webhook.py +61 -0
  171. mlrun/utils/notifications/notification_pusher.py +5 -25
  172. mlrun/utils/regex.py +7 -2
  173. mlrun/utils/version/version.json +2 -2
  174. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/METADATA +26 -25
  175. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/RECORD +180 -158
  176. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/WHEEL +1 -1
  177. mlrun/mlutils/data.py +0 -160
  178. mlrun/mlutils/models.py +0 -78
  179. mlrun/mlutils/plots.py +0 -902
  180. mlrun/utils/model_monitoring.py +0 -249
  181. /mlrun/{api/db/sqldb/session.py → common/db/sql_session.py} +0 -0
  182. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/LICENSE +0 -0
  183. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/entry_points.txt +0 -0
  184. {mlrun-1.4.0rc25.dist-info → mlrun-1.5.0rc2.dist-info}/top_level.txt +0 -0

mlrun/api/api/endpoints/pipelines.py CHANGED
@@ -129,11 +129,9 @@ async def get_pipeline(
      auth_info: mlrun.common.schemas.AuthInfo = Depends(
          mlrun.api.api.deps.authenticate_request
      ),
-     db_session: Session = Depends(deps.get_db_session),
  ):
      pipeline = await run_in_threadpool(
          mlrun.api.crud.Pipelines().get_pipeline,
-         db_session,
          run_id,
          project,
          namespace,
@@ -145,7 +143,7 @@ async def get_pipeline(
          # legacy flow in which we first get the pipeline, resolve the project out of it, and only then query permissions
          # we don't use the return value from this function since the user may have asked for a different format than
          # summary which is the one used inside
-         await _get_pipeline_without_project(db_session, auth_info, run_id, namespace)
+         await _get_pipeline_without_project(auth_info, run_id, namespace)
      else:
          await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
              mlrun.common.schemas.AuthorizationResourceTypes.pipeline,
@@ -158,7 +156,6 @@ async def get_pipeline(


  async def _get_pipeline_without_project(
-     db_session: Session,
      auth_info: mlrun.common.schemas.AuthInfo,
      run_id: str,
      namespace: str,
@@ -170,7 +167,6 @@ async def _get_pipeline_without_project(
      """
      run = await run_in_threadpool(
          mlrun.api.crud.Pipelines().get_pipeline,
-         db_session,
          run_id,
          namespace=namespace,
          # minimal format that includes the project

mlrun/api/api/endpoints/projects.py CHANGED
@@ -20,6 +20,7 @@ import sqlalchemy.orm
  from fastapi.concurrency import run_in_threadpool

  import mlrun.api.api.deps
+ import mlrun.api.crud
  import mlrun.api.utils.auth.verifier
  import mlrun.api.utils.clients.chief
  import mlrun.common.schemas
@@ -323,6 +324,93 @@ async def get_project_summary(
      return project_summary


+ @router.post("/projects/{name}/load")
+ async def load_project(
+     name: str,
+     url: str,
+     secrets: mlrun.common.schemas.SecretsData = None,
+     auth_info: mlrun.common.schemas.AuthInfo = fastapi.Depends(
+         mlrun.api.api.deps.authenticate_request
+     ),
+     db_session: sqlalchemy.orm.Session = fastapi.Depends(
+         mlrun.api.api.deps.get_db_session
+     ),
+ ):
+     """
+     Loading a project remotely from a given source.
+
+     :param name: project name
+     :param url: git or tar.gz or .zip sources archive path e.g.:
+         git://github.com/mlrun/demo-xgb-project.git
+         http://mysite/archived-project.zip
+         The git project should include the project yaml file.
+     :param secrets: Secrets to store in project in order to load it from the provided url.
+         For more information see :py:func:`mlrun.load_project` function.
+     :param auth_info: auth info of the request
+     :param db_session: session that manages the current dialog with the database
+
+     :returns: a Run object of the load project function
+     """
+
+     project = mlrun.common.schemas.Project(
+         metadata=mlrun.common.schemas.ProjectMetadata(name=name),
+         spec=mlrun.common.schemas.ProjectSpec(source=url),
+     )
+
+     # We must create the project before we run the remote load_project function because
+     # we want this function will be running under the project itself instead of the default project.
+     project, _ = await fastapi.concurrency.run_in_threadpool(
+         get_project_member().create_project,
+         db_session=db_session,
+         project=project,
+         projects_role=auth_info.projects_role,
+         leader_session=auth_info.session,
+     )
+
+     # Storing secrets in project
+     if secrets is not None:
+         await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
+             mlrun.common.schemas.AuthorizationResourceTypes.secret,
+             project.metadata.name,
+             secrets.provider,
+             mlrun.common.schemas.AuthorizationAction.create,
+             auth_info,
+         )
+
+         await run_in_threadpool(
+             mlrun.api.crud.Secrets().store_project_secrets,
+             project.metadata.name,
+             secrets,
+         )
+
+     # Creating the auxiliary function for loading the project:
+     load_project_runner = await fastapi.concurrency.run_in_threadpool(
+         mlrun.api.crud.WorkflowRunners().create_runner,
+         run_name=f"load-{name}",
+         project=name,
+         db_session=db_session,
+         auth_info=auth_info,
+         image=mlrun.mlconf.default_base_image,
+     )
+
+     logger.debug(
+         "Saved function for loading project",
+         project_name=name,
+         function_name=load_project_runner.metadata.name,
+         kind=load_project_runner.kind,
+         source=project.spec.source,
+     )
+
+     run = await fastapi.concurrency.run_in_threadpool(
+         mlrun.api.crud.WorkflowRunners().run,
+         runner=load_project_runner,
+         project=project,
+         workflow_request=None,
+         load_only=True,
+     )
+     return {"data": run.to_dict()}
+
+
  def _is_request_from_leader(
      projects_role: typing.Optional[mlrun.common.schemas.ProjectsRole],
  ) -> bool:
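
The new `/projects/{name}/load` route can be exercised with a plain HTTP client. A minimal sketch follows, assuming the `/api/v1` prefix used by recent MLRun API servers and a reachable API service; the base URL, project name, source URL, and secret values are placeholders, and the request shape (query parameter for `url`, `SecretsData` JSON body) is inferred from the endpoint signature above.

```python
import requests

API_URL = "http://mlrun-api:8080"  # placeholder; point at your MLRun API service
PROJECT = "demo-xgb-project"       # placeholder project name
SOURCE = "git://github.com/mlrun/demo-xgb-project.git"

# `url` is declared as a plain parameter above, so it travels as a query parameter;
# the optional SecretsData payload (provider + secrets map) goes in the JSON body.
response = requests.post(
    f"{API_URL}/api/v1/projects/{PROJECT}/load",
    params={"url": SOURCE},
    json={"provider": "kubernetes", "secrets": {"GIT_TOKEN": "<token>"}},
)
response.raise_for_status()

# The handler returns the run of the auxiliary "load-<project>" function,
# not the project object itself; track it like any other MLRun run.
run = response.json()["data"]
print(run.get("metadata", {}).get("uid"))
```
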

mlrun/api/api/endpoints/runs.py CHANGED
@@ -31,7 +31,14 @@ from mlrun.utils.helpers import datetime_from_iso
  router = APIRouter()


- @router.post("/run/{project}/{uid}")
+ # TODO: remove /run/{project}/{uid} in 1.7.0
+ @router.post(
+     "/run/{project}/{uid}",
+     deprecated=True,
+     description="/run/{project}/{uid} is deprecated in 1.5.0 and will be removed in 1.7.0, "
+     "use /projects/{project}/runs/{uid} instead",
+ )
+ @router.post("/projects/{project}/runs/{uid}")
  async def store_run(
      request: Request,
      project: str,
@@ -70,7 +77,14 @@ async def store_run(
      return {}


- @router.patch("/run/{project}/{uid}")
+ # TODO: remove /run/{project}/{uid} in 1.7.0
+ @router.patch(
+     "/run/{project}/{uid}",
+     deprecated=True,
+     description="/run/{project}/{uid} is deprecated in 1.5.0 and will be removed in 1.7.0, "
+     "use /projects/{project}/runs/{uid} instead",
+ )
+ @router.patch("/projects/{project}/runs/{uid}")
  async def update_run(
      request: Request,
      project: str,
@@ -103,7 +117,14 @@ async def update_run(
      return {}


- @router.get("/run/{project}/{uid}")
+ # TODO: remove /run/{project}/{uid} in 1.7.0
+ @router.get(
+     "/run/{project}/{uid}",
+     deprecated=True,
+     description="/run/{project}/{uid} is deprecated in 1.5.0 and will be removed in 1.7.0, "
+     "use /projects/{project}/runs/{uid} instead",
+ )
+ @router.get("/projects/{project}/runs/{uid}")
  async def get_run(
      project: str,
      uid: str,
@@ -126,7 +147,14 @@ async def get_run(
      }


- @router.delete("/run/{project}/{uid}")
+ # TODO: remove /run/{project}/{uid} in 1.7.0
+ @router.delete(
+     "/run/{project}/{uid}",
+     deprecated=True,
+     description="/run/{project}/{uid} is deprecated in 1.5.0 and will be removed in 1.7.0, "
+     "use /projects/{project}/runs/{uid} instead",
+ )
+ @router.delete("/projects/{project}/runs/{uid}")
  async def delete_run(
      project: str,
      uid: str,
@@ -151,7 +179,14 @@ async def delete_run(
      return {}


- @router.get("/runs")
+ # TODO: remove /runs in 1.7.0
+ @router.get(
+     "/runs",
+     deprecated=True,
+     description="/runs is deprecated in 1.5.0 and will be removed in 1.7.0, "
+     "use /projects/{project}/runs/{uid} instead",
+ )
+ @router.get("/projects/{project}/runs")
  async def list_runs(
      project: str = None,
      name: str = None,
@@ -222,7 +257,14 @@ async def list_runs(
      }


- @router.delete("/runs")
+ # TODO: remove /runs in 1.7.0
+ @router.delete(
+     "/runs",
+     deprecated=True,
+     description="/runs is deprecated in 1.5.0 and will be removed in 1.7.0, "
+     "use /projects/{project}/runs/{uid} instead",
+ )
+ @router.delete("/projects/{project}/runs")
  async def delete_runs(
      project: str = None,
      name: str = None,
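
For client code that calls these routes directly, the change is a path migration only: the deprecated and the new routes are stacked decorators on the same handlers until the old paths are removed in 1.7.0. A hedged sketch, where the `/api/v1` prefix, base URL, project, and uid are placeholders:

```python
import requests

API_URL = "http://mlrun-api:8080"  # placeholder
PROJECT, UID = "my-project", "abc123"

# Deprecated form, still served until 1.7.0:
legacy = requests.get(f"{API_URL}/api/v1/run/{PROJECT}/{UID}")

# Project-scoped form introduced in 1.5.0:
current = requests.get(f"{API_URL}/api/v1/projects/{PROJECT}/runs/{UID}")

# Both routes invoke the same get_run handler, so the payloads should match.
print(legacy.status_code, current.status_code)
```
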

mlrun/api/api/endpoints/submit.py CHANGED
@@ -23,6 +23,7 @@ import mlrun.api.api.utils
  import mlrun.api.utils.auth.verifier
  import mlrun.api.utils.clients.chief
  import mlrun.api.utils.singletons.project_member
+ import mlrun.common.helpers
  import mlrun.common.schemas
  import mlrun.utils.helpers
  from mlrun.api.api import deps
@@ -68,7 +69,7 @@ async def submit_job(
          function_name,
          _,
          _,
-     ) = mlrun.utils.helpers.parse_versioned_object_uri(function_url)
+     ) = mlrun.common.helpers.parse_versioned_object_uri(function_url)
      await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
          mlrun.common.schemas.AuthorizationResourceTypes.function,
          function_project,
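
`parse_versioned_object_uri` moved from `mlrun.utils.helpers` to `mlrun.common.helpers`. A hedged sketch of the import change for downstream code; the example URI is a placeholder, and the 4-tuple return shape is taken from the unpacking at the call site above:

```python
# Old location (1.4.x): from mlrun.utils import parse_versioned_object_uri
from mlrun.common.helpers import parse_versioned_object_uri

# Returns (project, name, tag, hash_key), matching the unpacking in submit_job above.
project, name, tag, hash_key = parse_versioned_object_uri("my-project/trainer:latest")
print(project, name, tag)
```
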

mlrun/api/api/endpoints/workflows.py ADDED
@@ -0,0 +1,355 @@
+ # Copyright 2018 Iguazio
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ #
+ import collections.abc
+ import copy
+ import traceback
+ import typing
+ from http import HTTPStatus
+ from typing import Dict
+
+ import fastapi
+ from fastapi.concurrency import run_in_threadpool
+ from sqlalchemy.orm import Session
+
+ import mlrun
+ import mlrun.api.api.deps
+ import mlrun.api.api.utils
+ import mlrun.api.crud
+ import mlrun.api.utils.auth.verifier
+ import mlrun.api.utils.clients.chief
+ import mlrun.api.utils.singletons.db
+ import mlrun.api.utils.singletons.project_member
+ import mlrun.common.schemas
+ import mlrun.projects.pipelines
+ from mlrun.api.api.utils import log_and_raise
+ from mlrun.utils.helpers import logger
+
+ router = fastapi.APIRouter()
+
+
+ @router.post(
+     "/projects/{project}/workflows/{name}/submit",
+     status_code=HTTPStatus.ACCEPTED.value,
+     response_model=mlrun.common.schemas.WorkflowResponse,
+ )
+ async def submit_workflow(
+     project: str,
+     name: str,
+     request: fastapi.Request,
+     workflow_request: mlrun.common.schemas.WorkflowRequest = mlrun.common.schemas.WorkflowRequest(),
+     auth_info: mlrun.common.schemas.AuthInfo = fastapi.Depends(
+         mlrun.api.api.deps.authenticate_request
+     ),
+     db_session: Session = fastapi.Depends(mlrun.api.api.deps.get_db_session),
+ ):
+     """
+     Submitting a workflow of existing project.
+     To support workflow scheduling, we use here an auxiliary function called 'load_and_run'.
+     This function runs remotely (in a distinct pod), loads a project and then runs the workflow.
+     In this way we can run the workflow remotely with the workflow's engine or
+     schedule this function which in every time loads the project and runs the workflow.
+     Notice:
+     in case of simply running a workflow, the returned run_id value is the id of the run of the auxiliary function.
+     For getting the id and status of the workflow, use the `get_workflow_id` endpoint with the returned run id.
+
+     :param project: name of the project
+     :param name: name of the workflow
+     :param request: fastapi request for supporting rerouting to chief if needed
+     :param workflow_request: the request includes: workflow spec, arguments for the workflow, artifact path
+         as the artifact target path of the workflow, source url of the project for overriding
+         the existing one, run name to override the default: 'workflow-runner-<workflow name>'
+         and kubernetes namespace if other than default
+     :param auth_info: auth info of the request
+     :param db_session: session that manages the current dialog with the database
+
+     :returns: response that contains the project name, workflow name, name of the workflow,
+         status, run id (in case of a single run) and schedule (in case of scheduling)
+     """
+     project = await run_in_threadpool(
+         mlrun.api.utils.singletons.project_member.get_project_member().get_project,
+         db_session=db_session,
+         name=project,
+         leader_session=auth_info.session,
+     )
+
+     # check permission CREATE run
+     await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
+         resource_type=mlrun.common.schemas.AuthorizationResourceTypes.run,
+         project_name=project.metadata.name,
+         resource_name=workflow_request.run_name or "",
+         action=mlrun.common.schemas.AuthorizationAction.create,
+         auth_info=auth_info,
+     )
+     # check permission READ workflow on project's workflow
+     await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
+         resource_type=mlrun.common.schemas.AuthorizationResourceTypes.workflow,
+         project_name=project.metadata.name,
+         resource_name=name,
+         action=mlrun.common.schemas.AuthorizationAction.read,
+         auth_info=auth_info,
+     )
+     # Check permission CREATE workflow on new workflow's name
+     await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
+         resource_type=mlrun.common.schemas.AuthorizationResourceTypes.workflow,
+         project_name=project.metadata.name,
+         # If workflow spec has not passed need to create on same name:
+         resource_name=getattr(workflow_request.spec, "name", name),
+         action=mlrun.common.schemas.AuthorizationAction.create,
+         auth_info=auth_info,
+     )
+     # Re-route to chief in case of schedule
+     if (
+         _is_requested_schedule(name, workflow_request.spec, project)
+         and mlrun.mlconf.httpdb.clusterization.role
+         != mlrun.common.schemas.ClusterizationRole.chief
+     ):
+         chief_client = mlrun.api.utils.clients.chief.Client()
+         return await chief_client.submit_workflow(
+             project=project.metadata.name,
+             name=name,
+             request=request,
+             json=workflow_request.dict(),
+         )
+
+     workflow_spec = _fill_workflow_missing_fields_from_project(
+         project=project,
+         workflow_name=name,
+         spec=workflow_request.spec,
+         arguments=workflow_request.arguments,
+     )
+     updated_request = workflow_request.copy()
+     updated_request.spec = workflow_spec
+
+     # This function is for loading the project and running workflow remotely.
+     # In this way we can schedule workflows (by scheduling a job that runs the workflow)
+     workflow_runner = await run_in_threadpool(
+         mlrun.api.crud.WorkflowRunners().create_runner,
+         run_name=updated_request.run_name
+         or mlrun.mlconf.workflows.default_workflow_runner_name.format(
+             workflow_spec.name
+         ),
+         project=project.metadata.name,
+         db_session=db_session,
+         auth_info=auth_info,
+         image=workflow_spec.image
+         or project.spec.default_image
+         or mlrun.mlconf.default_base_image,
+     )
+
+     logger.debug(
+         "Saved function for running workflow",
+         project_name=workflow_runner.metadata.project,
+         function_name=workflow_runner.metadata.name,
+         workflow_name=workflow_spec.name,
+         arguments=workflow_spec.args,
+         source=updated_request.source or project.spec.source,
+         kind=workflow_runner.kind,
+         image=workflow_runner.spec.image,
+     )
+
+     run_uid = None
+     status = None
+     workflow_action = "schedule" if workflow_spec.schedule else "run"
+     try:
+         if workflow_spec.schedule:
+             await run_in_threadpool(
+                 mlrun.api.crud.WorkflowRunners().schedule,
+                 runner=workflow_runner,
+                 project=project,
+                 workflow_request=updated_request,
+                 db_session=db_session,
+                 auth_info=auth_info,
+             )
+             status = "scheduled"
+
+         else:
+             run = await run_in_threadpool(
+                 mlrun.api.crud.WorkflowRunners().run,
+                 runner=workflow_runner,
+                 project=project,
+                 workflow_request=updated_request,
+             )
+             status = mlrun.run.RunStatuses.running
+             run_uid = run.uid()
+     except Exception as error:
+         logger.error(traceback.format_exc())
+         log_and_raise(
+             reason="Workflow failed",
+             workflow_name=workflow_spec.name,
+             workflow_action=workflow_action,
+             error=mlrun.errors.err_to_str(error),
+         )
+
+     return mlrun.common.schemas.WorkflowResponse(
+         project=project.metadata.name,
+         name=workflow_spec.name,
+         status=status,
+         run_id=run_uid,
+         schedule=workflow_spec.schedule,
+     )
+
+
+ def _is_requested_schedule(
+     name: str,
+     workflow_spec: mlrun.common.schemas.WorkflowSpec,
+     project: mlrun.common.schemas.Project,
+ ) -> bool:
+     """
+     Checks if the workflow needs to be scheduled, which can be decided either the request itself
+     contains schedule information or the workflow which was predefined in the project contains schedule.
+
+     :param name: workflow name
+     :param workflow_spec: workflow spec input
+     :param project: MLRun project that contains the workflow
+
+     :return: True if the workflow need to be scheduled and False if not.
+     """
+     if workflow_spec:
+         return workflow_spec.schedule is not None
+
+     project_workflow = _get_workflow_by_name(project, name)
+     return bool(project_workflow.get("schedule"))
+
+
+ def _get_workflow_by_name(
+     project: mlrun.common.schemas.Project, name: str
+ ) -> typing.Optional[Dict]:
+     """
+     Getting workflow from project
+
+     :param project: MLRun project
+     :param name: workflow name
+
+     :return: workflow as a dict if project has the workflow, otherwise raises a bad request exception
+     """
+     for workflow in project.spec.workflows:
+         if workflow["name"] == name:
+             return workflow
+     log_and_raise(
+         reason=f"workflow {name} not found in project",
+     )
+
+
+ def _fill_workflow_missing_fields_from_project(
+     project: mlrun.common.schemas.Project,
+     workflow_name: str,
+     spec: mlrun.common.schemas.WorkflowSpec,
+     arguments: typing.Dict,
+ ) -> mlrun.common.schemas.WorkflowSpec:
+     """
+     Fill the workflow spec details from the project object, with favour to spec
+
+     :param project: MLRun project that contains the workflow.
+     :param workflow_name: workflow name
+     :param spec: workflow spec input
+     :param arguments: arguments to workflow
+
+     :return: completed workflow spec
+     """
+     # Verifying workflow exists in project:
+     workflow = _get_workflow_by_name(project, workflow_name)
+
+     if spec:
+         # Merge between the workflow spec provided in the request with existing
+         # workflow while the provided workflow takes precedence over the existing workflow params
+         workflow = copy.deepcopy(workflow)
+         workflow = _update_dict(workflow, spec.dict())
+
+     workflow_spec = mlrun.common.schemas.WorkflowSpec(**workflow)
+     # Overriding arguments of the existing workflow:
+     if arguments:
+         workflow_spec.args = workflow_spec.args or {}
+         workflow_spec.args.update(arguments)
+
+     return workflow_spec
+
+
+ def _update_dict(dict_1: dict, dict_2: dict):
+     """
+     Update two dictionaries included nested dictionaries (recursively).
+     :param dict_1: The dict to update
+     :param dict_2: The values of this dict take precedence over dict_1.
+     :return:
+     """
+     for key, val in dict_2.items():
+         if isinstance(val, collections.abc.Mapping):
+             dict_1[key] = _update_dict(dict_1.get(key, {}), val)
+         # It is necessary to update only if value is exist because
+         # on initialization of the WorkflowSpec object all unfilled values gets None values,
+         # and when converting to dict the keys gets those None values.
+         elif val:
+             dict_1[key] = val
+     return dict_1
+
+
+ @router.get(
+     "/projects/{project}/workflows/{name}/runs/{uid}",
+     response_model=mlrun.common.schemas.GetWorkflowResponse,
+ )
+ async def get_workflow_id(
+     project: str,
+     name: str,
+     uid: str,
+     auth_info: mlrun.common.schemas.AuthInfo = fastapi.Depends(
+         mlrun.api.api.deps.authenticate_request
+     ),
+     db_session: Session = fastapi.Depends(mlrun.api.api.deps.get_db_session),
+     engine: str = "kfp",
+ ) -> mlrun.common.schemas.GetWorkflowResponse:
+     """
+     Retrieve workflow id from the uid of the workflow runner.
+     When creating a remote workflow we are creating an auxiliary function
+     which is responsible for actually running the workflow,
+     as we don't know beforehand the workflow uid but only the run uid of the auxiliary function we ran,
+     we have to wait until the running function will log the workflow id it created.
+     Because we don't know how long it will take for the run to create the workflow
+     we decided to implement that in an asynchronous mechanism which at first,
+     client will get the run uid and then will pull the workflow id from the run id
+     kinda as you would use a background task to query if it finished.
+     Supporting workflows that executed by the remote engine **only**.
+
+     :param project: name of the project
+     :param name: name of the workflow
+     :param uid: the id of the running job that runs the workflow
+     :param auth_info: auth info of the request
+     :param db_session: session that manages the current dialog with the database
+     :param engine: pipeline runner, for example: "kfp"
+
+     :returns: workflow id
+     """
+     # Check permission READ run:
+     await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
+         mlrun.common.schemas.AuthorizationResourceTypes.run,
+         project,
+         uid,
+         mlrun.common.schemas.AuthorizationAction.read,
+         auth_info,
+     )
+     # Check permission READ workflow:
+     await mlrun.api.utils.auth.verifier.AuthVerifier().query_project_resource_permissions(
+         mlrun.common.schemas.AuthorizationResourceTypes.workflow,
+         project,
+         name,
+         mlrun.common.schemas.AuthorizationAction.read,
+         auth_info,
+     )
+
+     return await run_in_threadpool(
+         mlrun.api.crud.WorkflowRunners().get_workflow_id,
+         uid=uid,
+         project=project,
+         engine=engine,
+         db_session=db_session,
+     )
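
Putting the two endpoints together from a client's point of view: submit the workflow, take the returned `run_id` of the auxiliary runner, then poll `get_workflow_id` until the runner has logged the real workflow id. A hedged HTTP sketch; the `/api/v1` prefix, base URL, project and workflow names, and the `workflow_id` response field name are assumptions drawn from the route definitions and response models above.

```python
import time

import requests

API_URL = "http://mlrun-api:8080"  # placeholder
PROJECT, WORKFLOW = "demo-xgb-project", "main"

# Submit; the body follows WorkflowRequest (spec, arguments, source, run_name are optional).
submit = requests.post(
    f"{API_URL}/api/v1/projects/{PROJECT}/workflows/{WORKFLOW}/submit",
    json={"arguments": {"model_name": "my-model"}},
)
submit.raise_for_status()
run_id = submit.json()["run_id"]  # uid of the workflow-runner run, not the workflow itself

# Poll until the runner has logged the workflow id (remote engine only).
while True:
    status = requests.get(
        f"{API_URL}/api/v1/projects/{PROJECT}/workflows/{WORKFLOW}/runs/{run_id}"
    )
    if status.ok:
        print("workflow id:", status.json().get("workflow_id"))
        break
    time.sleep(5)
```
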
mlrun/api/api/utils.py CHANGED
@@ -37,15 +37,16 @@ import mlrun.errors
  import mlrun.runtimes.pod
  import mlrun.utils.helpers
  from mlrun.api.db.sqldb.db import SQLDB
+ from mlrun.api.rundb.sqldb import SQLRunDB
  from mlrun.api.utils.singletons.db import get_db
  from mlrun.api.utils.singletons.logs_dir import get_logs_dir
  from mlrun.api.utils.singletons.scheduler import get_scheduler
+ from mlrun.common.helpers import parse_versioned_object_uri
  from mlrun.config import config
- from mlrun.db.sqldb import SQLDB as SQLRunDB
  from mlrun.errors import err_to_str
  from mlrun.run import import_function, new_function
  from mlrun.runtimes.utils import enrich_function_from_dict
- from mlrun.utils import get_in, logger, parse_versioned_object_uri
+ from mlrun.utils import get_in, logger


  def log_and_raise(status=HTTPStatus.BAD_REQUEST.value, **kw):
@@ -203,7 +204,6 @@ async def submit_run(


  def apply_enrichment_and_validation_on_task(task):
-
      # Masking notification config params from the task object
      mask_notification_params_on_task(task)

@@ -817,7 +817,6 @@ def ensure_function_security_context(
          mlrun.common.schemas.SecurityContextEnrichmentModes.override.value,
          mlrun.common.schemas.SecurityContextEnrichmentModes.retain.value,
      ]:
-
          # before iguazio 3.6 the user unix id is not passed in the session verification response headers
         # so we need to request it explicitly
          if auth_info.user_unix_id is None:
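
The server-side run-DB flavor of the SQL client now lives under `mlrun.api.rundb` rather than being aliased out of `mlrun.db.sqldb`. A small before/after sketch of the import change for internal code that referenced the old alias (shown for orientation only, not as a public API):

```python
# 1.4.x:
# from mlrun.db.sqldb import SQLDB as SQLRunDB

# 1.5.x:
from mlrun.api.rundb.sqldb import SQLRunDB
```
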

mlrun/api/crud/__init__.py CHANGED
@@ -29,3 +29,4 @@ from .runs import Runs
  from .runtime_resources import RuntimeResources
  from .secrets import Secrets, SecretsClientType
  from .tags import Tags
+ from .workflows import WorkflowRunners

mlrun/api/crud/client_spec.py CHANGED
@@ -12,10 +12,11 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
  #
+ import mlrun.api.utils.runtimes.nuclio
  import mlrun.common.schemas
  import mlrun.utils.singleton
  from mlrun.config import Config, config, default_config
- from mlrun.runtimes.utils import resolve_mpijob_crd_version, resolve_nuclio_version
+ from mlrun.runtimes.utils import resolve_mpijob_crd_version


  class ClientSpec(
@@ -44,7 +45,7 @@ class ClientSpec(
              config.dask_kfp_image, client_version, client_python_version
          ),
          api_url=config.httpdb.api_url,
-         nuclio_version=resolve_nuclio_version(),
+         nuclio_version=mlrun.api.utils.runtimes.nuclio.resolve_nuclio_version(),
          spark_operator_version=config.spark_operator_version,
          calculate_artifact_hash=config.artifacts.calculate_hash,
          generate_artifact_target_path_from_artifact_hash=config.artifacts.generate_target_path_from_artifact_hash,
@@ -101,6 +102,9 @@ class ClientSpec(
          feature_store_data_prefixes=self._get_config_value_if_not_default(
              "feature_store.data_prefixes"
          ),
+         model_endpoint_monitoring_store_type=self._get_config_value_if_not_default(
+             "model_endpoint_monitoring.store_type"
+         ),
      )

  @staticmethod

mlrun/api/crud/feature_store.py CHANGED
@@ -58,6 +58,11 @@ class FeatureStore(
          if not feature_set.spec.engine:
              feature_set.spec.engine = "storey"

+         if not feature_set.status.state:
+             feature_set.status.state = (
+                 mlrun.common.schemas.object.ObjectStatusState.CREATED
+             )
+
          return self._store_object(
              db_session,
              project,