mlrun 1.7.0rc28__py3-none-any.whl → 1.7.0rc55__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (135)
  1. mlrun/__main__.py +4 -2
  2. mlrun/alerts/alert.py +75 -8
  3. mlrun/artifacts/base.py +1 -0
  4. mlrun/artifacts/manager.py +9 -2
  5. mlrun/common/constants.py +4 -1
  6. mlrun/common/db/sql_session.py +3 -2
  7. mlrun/common/formatters/__init__.py +1 -0
  8. mlrun/common/formatters/artifact.py +1 -0
  9. mlrun/{model_monitoring/application.py → common/formatters/feature_set.py} +20 -6
  10. mlrun/common/formatters/run.py +3 -0
  11. mlrun/common/helpers.py +0 -1
  12. mlrun/common/schemas/__init__.py +3 -1
  13. mlrun/common/schemas/alert.py +15 -12
  14. mlrun/common/schemas/api_gateway.py +6 -6
  15. mlrun/common/schemas/auth.py +5 -0
  16. mlrun/common/schemas/client_spec.py +0 -1
  17. mlrun/common/schemas/common.py +7 -4
  18. mlrun/common/schemas/frontend_spec.py +7 -0
  19. mlrun/common/schemas/function.py +7 -0
  20. mlrun/common/schemas/model_monitoring/__init__.py +4 -3
  21. mlrun/common/schemas/model_monitoring/constants.py +41 -26
  22. mlrun/common/schemas/model_monitoring/model_endpoints.py +23 -47
  23. mlrun/common/schemas/notification.py +69 -12
  24. mlrun/common/schemas/project.py +45 -12
  25. mlrun/common/schemas/workflow.py +10 -2
  26. mlrun/common/types.py +1 -0
  27. mlrun/config.py +91 -35
  28. mlrun/data_types/data_types.py +6 -1
  29. mlrun/data_types/spark.py +2 -2
  30. mlrun/data_types/to_pandas.py +57 -25
  31. mlrun/datastore/__init__.py +1 -0
  32. mlrun/datastore/alibaba_oss.py +3 -2
  33. mlrun/datastore/azure_blob.py +125 -37
  34. mlrun/datastore/base.py +42 -21
  35. mlrun/datastore/datastore.py +4 -2
  36. mlrun/datastore/datastore_profile.py +1 -1
  37. mlrun/datastore/dbfs_store.py +3 -7
  38. mlrun/datastore/filestore.py +1 -3
  39. mlrun/datastore/google_cloud_storage.py +85 -29
  40. mlrun/datastore/inmem.py +4 -1
  41. mlrun/datastore/redis.py +1 -0
  42. mlrun/datastore/s3.py +25 -12
  43. mlrun/datastore/sources.py +76 -4
  44. mlrun/datastore/spark_utils.py +30 -0
  45. mlrun/datastore/storeytargets.py +151 -0
  46. mlrun/datastore/targets.py +102 -131
  47. mlrun/datastore/v3io.py +1 -0
  48. mlrun/db/base.py +15 -6
  49. mlrun/db/httpdb.py +57 -28
  50. mlrun/db/nopdb.py +29 -5
  51. mlrun/errors.py +20 -3
  52. mlrun/execution.py +46 -5
  53. mlrun/feature_store/api.py +25 -1
  54. mlrun/feature_store/common.py +6 -11
  55. mlrun/feature_store/feature_vector.py +3 -1
  56. mlrun/feature_store/retrieval/job.py +4 -1
  57. mlrun/feature_store/retrieval/spark_merger.py +10 -39
  58. mlrun/feature_store/steps.py +8 -0
  59. mlrun/frameworks/_common/plan.py +3 -3
  60. mlrun/frameworks/_ml_common/plan.py +1 -1
  61. mlrun/frameworks/parallel_coordinates.py +2 -3
  62. mlrun/frameworks/sklearn/mlrun_interface.py +13 -3
  63. mlrun/k8s_utils.py +48 -2
  64. mlrun/launcher/client.py +6 -6
  65. mlrun/launcher/local.py +2 -2
  66. mlrun/model.py +215 -34
  67. mlrun/model_monitoring/api.py +38 -24
  68. mlrun/model_monitoring/applications/__init__.py +1 -2
  69. mlrun/model_monitoring/applications/_application_steps.py +60 -29
  70. mlrun/model_monitoring/applications/base.py +2 -174
  71. mlrun/model_monitoring/applications/context.py +197 -70
  72. mlrun/model_monitoring/applications/evidently_base.py +11 -85
  73. mlrun/model_monitoring/applications/histogram_data_drift.py +21 -16
  74. mlrun/model_monitoring/applications/results.py +4 -4
  75. mlrun/model_monitoring/controller.py +110 -282
  76. mlrun/model_monitoring/db/stores/__init__.py +8 -3
  77. mlrun/model_monitoring/db/stores/base/store.py +3 -0
  78. mlrun/model_monitoring/db/stores/sqldb/models/base.py +9 -7
  79. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +18 -3
  80. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +43 -23
  81. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +48 -35
  82. mlrun/model_monitoring/db/tsdb/__init__.py +7 -2
  83. mlrun/model_monitoring/db/tsdb/base.py +147 -15
  84. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +94 -55
  85. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -3
  86. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +144 -38
  87. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +44 -3
  88. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +246 -57
  89. mlrun/model_monitoring/helpers.py +70 -50
  90. mlrun/model_monitoring/stream_processing.py +96 -195
  91. mlrun/model_monitoring/writer.py +13 -5
  92. mlrun/package/packagers/default_packager.py +2 -2
  93. mlrun/projects/operations.py +16 -8
  94. mlrun/projects/pipelines.py +126 -115
  95. mlrun/projects/project.py +286 -129
  96. mlrun/render.py +3 -3
  97. mlrun/run.py +38 -19
  98. mlrun/runtimes/__init__.py +19 -8
  99. mlrun/runtimes/base.py +4 -1
  100. mlrun/runtimes/daskjob.py +1 -1
  101. mlrun/runtimes/funcdoc.py +1 -1
  102. mlrun/runtimes/kubejob.py +6 -6
  103. mlrun/runtimes/local.py +12 -5
  104. mlrun/runtimes/nuclio/api_gateway.py +68 -8
  105. mlrun/runtimes/nuclio/application/application.py +307 -70
  106. mlrun/runtimes/nuclio/function.py +63 -14
  107. mlrun/runtimes/nuclio/serving.py +10 -10
  108. mlrun/runtimes/pod.py +25 -19
  109. mlrun/runtimes/remotesparkjob.py +2 -5
  110. mlrun/runtimes/sparkjob/spark3job.py +16 -17
  111. mlrun/runtimes/utils.py +34 -0
  112. mlrun/serving/routers.py +2 -5
  113. mlrun/serving/server.py +37 -19
  114. mlrun/serving/states.py +30 -3
  115. mlrun/serving/v2_serving.py +44 -35
  116. mlrun/track/trackers/mlflow_tracker.py +5 -0
  117. mlrun/utils/async_http.py +1 -1
  118. mlrun/utils/db.py +18 -0
  119. mlrun/utils/helpers.py +150 -36
  120. mlrun/utils/http.py +1 -1
  121. mlrun/utils/notifications/notification/__init__.py +0 -1
  122. mlrun/utils/notifications/notification/webhook.py +8 -1
  123. mlrun/utils/notifications/notification_pusher.py +1 -1
  124. mlrun/utils/v3io_clients.py +2 -2
  125. mlrun/utils/version/version.json +2 -2
  126. {mlrun-1.7.0rc28.dist-info → mlrun-1.7.0rc55.dist-info}/METADATA +153 -66
  127. {mlrun-1.7.0rc28.dist-info → mlrun-1.7.0rc55.dist-info}/RECORD +131 -134
  128. {mlrun-1.7.0rc28.dist-info → mlrun-1.7.0rc55.dist-info}/WHEEL +1 -1
  129. mlrun/feature_store/retrieval/conversion.py +0 -271
  130. mlrun/model_monitoring/controller_handler.py +0 -37
  131. mlrun/model_monitoring/evidently_application.py +0 -20
  132. mlrun/model_monitoring/prometheus.py +0 -216
  133. {mlrun-1.7.0rc28.dist-info → mlrun-1.7.0rc55.dist-info}/LICENSE +0 -0
  134. {mlrun-1.7.0rc28.dist-info → mlrun-1.7.0rc55.dist-info}/entry_points.txt +0 -0
  135. {mlrun-1.7.0rc28.dist-info → mlrun-1.7.0rc55.dist-info}/top_level.txt +0 -0
mlrun/common/schemas/model_monitoring/model_endpoints.py CHANGED
@@ -14,27 +14,32 @@

  import enum
  import json
- import re
  from datetime import datetime
- from typing import Any, NamedTuple, Optional
+ from typing import Any, NamedTuple, Optional, TypeVar

- from pydantic import BaseModel, Field, validator
- from pydantic.main import Extra
+ from pydantic import BaseModel, Extra, Field, constr, validator

+ # TODO: remove the unused import below after `mlrun.datastore` and `mlrun.utils` usage is removed.
+ # At the moment `make lint` fails if this is removed.
  import mlrun.common.model_monitoring
- import mlrun.common.types

  from ..object import ObjectKind, ObjectSpec, ObjectStatus
  from .constants import (
+ FQN_REGEX,
+ MODEL_ENDPOINT_ID_PATTERN,
+ PROJECT_PATTERN,
  EndpointType,
  EventFieldType,
  EventKeyMetrics,
  EventLiveStats,
+ ModelEndpointMonitoringMetricType,
  ModelMonitoringMode,
  ResultKindApp,
  ResultStatusApp,
  )

+ Model = TypeVar("Model", bound=BaseModel)
+

  class ModelMonitoringStoreKinds:
  # TODO: do changes in examples & demos In 1.5.0 remove
@@ -43,9 +48,9 @@ class ModelMonitoringStoreKinds:


  class ModelEndpointMetadata(BaseModel):
- project: Optional[str] = ""
+ project: constr(regex=PROJECT_PATTERN)
+ uid: constr(regex=MODEL_ENDPOINT_ID_PATTERN)
  labels: Optional[dict] = {}
- uid: Optional[str] = ""

  class Config:
  extra = Extra.allow
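
With this change, `project` and `uid` become required, regex-constrained fields on `ModelEndpointMetadata` instead of defaulting to empty strings. A minimal sketch of construction after the change, assuming the illustrative values below satisfy `PROJECT_PATTERN` and `MODEL_ENDPOINT_ID_PATTERN` (the actual patterns live in `mlrun.common.schemas.model_monitoring.constants`):

    from mlrun.common.schemas.model_monitoring.model_endpoints import ModelEndpointMetadata

    # Illustrative values only; both fields are validated against the regex patterns
    # imported from .constants, and values that do not match raise pydantic.ValidationError.
    metadata = ModelEndpointMetadata(
        project="my-project",
        uid="0ab1c2d3e4f5061728394a5b6c7d8e9f",
        labels={"team": "ml"},
    )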
@@ -58,12 +63,11 @@ class ModelEndpointMetadata(BaseModel):
  :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
  dictionary using json.loads().
  """
- new_object = cls()
  if json_parse_values is None:
  json_parse_values = [EventFieldType.LABELS]

  return _mapping_attributes(
- base_model=new_object,
+ model_class=cls,
  flattened_dictionary=endpoint_dict,
  json_parse_values=json_parse_values,
  )
@@ -90,7 +94,6 @@ class ModelEndpointSpec(ObjectSpec):
  :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
  dictionary using json.loads().
  """
- new_object = cls()
  if json_parse_values is None:
  json_parse_values = [
  EventFieldType.FEATURE_NAMES,
@@ -98,7 +101,7 @@ class ModelEndpointSpec(ObjectSpec):
  EventFieldType.MONITOR_CONFIGURATION,
  ]
  return _mapping_attributes(
- base_model=new_object,
+ model_class=cls,
  flattened_dictionary=endpoint_dict,
  json_parse_values=json_parse_values,
  )
@@ -192,7 +195,6 @@ class ModelEndpointStatus(ObjectStatus):
  :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
  dictionary using json.loads().
  """
- new_object = cls()
  if json_parse_values is None:
  json_parse_values = [
  EventFieldType.FEATURE_STATS,
@@ -204,7 +206,7 @@ class ModelEndpointStatus(ObjectStatus):
  EventFieldType.ENDPOINT_TYPE,
  ]
  return _mapping_attributes(
- base_model=new_object,
+ model_class=cls,
  flattened_dictionary=endpoint_dict,
  json_parse_values=json_parse_values,
  )
@@ -212,22 +214,13 @@ class ModelEndpointStatus(ObjectStatus):

  class ModelEndpoint(BaseModel):
  kind: ObjectKind = Field(ObjectKind.model_endpoint, const=True)
- metadata: ModelEndpointMetadata = ModelEndpointMetadata()
+ metadata: ModelEndpointMetadata
  spec: ModelEndpointSpec = ModelEndpointSpec()
  status: ModelEndpointStatus = ModelEndpointStatus()

  class Config:
  extra = Extra.allow

- def __init__(self, **data: Any):
- super().__init__(**data)
- if self.metadata.uid is None:
- uid = mlrun.common.model_monitoring.create_model_endpoint_uid(
- function_uri=self.spec.function_uri,
- versioned_model=self.spec.model,
- )
- self.metadata.uid = str(uid)
-
  def flat_dict(self):
  """Generate a flattened `ModelEndpoint` dictionary. The flattened dictionary result is important for storing
  the model endpoint object in the database.
@@ -268,7 +261,7 @@ class ModelEndpoint(BaseModel):
  return flatten_dict

  @classmethod
- def from_flat_dict(cls, endpoint_dict: dict):
+ def from_flat_dict(cls, endpoint_dict: dict) -> "ModelEndpoint":
  """Create a `ModelEndpoint` object from an endpoint flattened dictionary. Because the provided dictionary
  is flattened, we pass it as is to the subclasses without splitting the keys into spec, metadata, and status.

@@ -286,11 +279,6 @@ class ModelEndpointList(BaseModel):
  endpoints: list[ModelEndpoint] = []


- class ModelEndpointMonitoringMetricType(mlrun.common.types.StrEnum):
- RESULT = "result"
- METRIC = "metric"
-
-
  class ModelEndpointMonitoringMetric(BaseModel):
  project: str
  app: str
@@ -309,18 +297,8 @@ def _compose_full_name(
  return ".".join([project, app, type, name])


- _FQN_PART_PATTERN = r"[a-zA-Z0-9_-]+"
- _FQN_PATTERN = (
- rf"^(?P<project>{_FQN_PART_PATTERN})\."
- rf"(?P<app>{_FQN_PART_PATTERN})\."
- rf"(?P<type>{ModelEndpointMonitoringMetricType.RESULT}|{ModelEndpointMonitoringMetricType.METRIC})\."
- rf"(?P<name>{_FQN_PART_PATTERN})$"
- )
- _FQN_REGEX = re.compile(_FQN_PATTERN)
-
-
  def _parse_metric_fqn_to_monitoring_metric(fqn: str) -> ModelEndpointMonitoringMetric:
- match = _FQN_REGEX.fullmatch(fqn)
+ match = FQN_REGEX.fullmatch(fqn)
  if match is None:
  raise ValueError("The fully qualified name is not in the expected format")
  return ModelEndpointMonitoringMetric.parse_obj(
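
The FQN building blocks (`FQN_REGEX` and the `ModelEndpointMonitoringMetricType` enum) moved to `.constants`, but the fully qualified name format itself is unchanged: `<project>.<app>.<result|metric>.<name>`. A hedged sketch of parsing one, with a made-up FQN:

    from mlrun.common.schemas.model_monitoring.model_endpoints import (
        _parse_metric_fqn_to_monitoring_metric,
    )

    # Hypothetical FQN; FQN_REGEX captures the project/app/type/name groups and the
    # helper returns the corresponding ModelEndpointMonitoringMetric object.
    metric = _parse_metric_fqn_to_monitoring_metric("my-project.my-app.result.data-drift")
    print(metric)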
@@ -365,20 +343,18 @@ class ModelEndpointMonitoringMetricNoData(_ModelEndpointMonitoringMetricValuesBa


  def _mapping_attributes(
- base_model: BaseModel,
- flattened_dictionary: dict,
- json_parse_values: list = None,
- ):
+ model_class: type[Model], flattened_dictionary: dict, json_parse_values: list
+ ) -> Model:
  """Generate a `BaseModel` object with the provided dictionary attributes.

- :param base_model: `BaseModel` object (e.g. `ModelEndpointMetadata`).
+ :param model_class: `BaseModel` class (e.g. `ModelEndpointMetadata`).
  :param flattened_dictionary: Flattened dictionary that contains the model endpoint attributes.
  :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
  dictionary using json.loads().
  """
  # Get the fields of the provided base model object. These fields will be used to filter to relevent keys
  # from the flattened dictionary.
- wanted_keys = base_model.__fields__.keys()
+ wanted_keys = model_class.__fields__.keys()

  # Generate a filtered flattened dictionary that will be parsed into the BaseModel object
  dict_to_parse = {}
@@ -392,7 +368,7 @@ def _mapping_attributes(
  else:
  dict_to_parse[field_key] = flattened_dictionary[field_key]

- return base_model.parse_obj(dict_to_parse)
+ return model_class.parse_obj(dict_to_parse)


  def _json_loads_if_not_none(field: Any) -> Any:
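
Because `_mapping_attributes` now receives the model class rather than a pre-built instance, the `from_flat_dict` constructors no longer need the `new_object = cls()` placeholder (which would not work anyway once `ModelEndpointMetadata` has required fields). A minimal sketch of the public entry point, assuming a flattened record with illustrative keys and that `EventFieldType.LABELS` resolves to the `"labels"` key:

    from mlrun.common.schemas.model_monitoring.model_endpoints import ModelEndpointMetadata

    # A flattened record as it might come back from the endpoints store; values are made up.
    flat_record = {
        "project": "my-project",
        "uid": "0ab1c2d3e4f5061728394a5b6c7d8e9f",
        "labels": '{"team": "ml"}',  # stored as a JSON string, parsed via json.loads()
        "extra_column": "ignored",   # keys outside the model's fields are filtered out
    }

    metadata = ModelEndpointMetadata.from_flat_dict(flat_record)
    print(metadata.labels)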
mlrun/common/schemas/notification.py CHANGED
@@ -22,11 +22,48 @@ import mlrun.common.types


  class NotificationKind(mlrun.common.types.StrEnum):
- console = "console"
- git = "git"
- ipython = "ipython"
- slack = "slack"
- webhook = "webhook"
+ """Currently, the supported notification kinds and their params are as follows:"""
+
+ console: str = "console"
+ """no params, local only"""
+
+ git: str = "git"
+ """
+ **token** - The git token to use for the git notification.\n
+ **repo** - The git repo to which to send the notification.\n
+ **issue** - The git issue to which to send the notification.\n
+ **merge_request** -
+ In GitLab (as opposed to GitHub), merge requests and issues are separate entities.
+ If using merge request, the issue will be ignored, and vice versa.\n
+ **server** - The git server to which to send the notification.\n
+ **gitlab** - (bool) Whether the git server is GitLab or not.\n
+ """
+
+ ipython: str = "ipython"
+ """no params, local only"""
+
+ slack: str = "slack"
+ """**webhook** - The slack webhook to which to send the notification."""
+
+ webhook: str = "webhook"
+ """
+ **url** - The webhook url to which to send the notification.\n
+ **method** - The http method to use when sending the notification (GET, POST, PUT, etc…).\n
+ **headers** - (dict) The http headers to send with the notification.\n
+ **override_body** -
+ (dict) The body to send with the notification. If not specified, the
+ default body will be a dictionary containing `name`, `message`, `severity`, and a `runs` list of the
+ completed runs. You can also add the run's details.\n
+ Example::
+
+ "override_body": {"message":"Run Completed {{ runs }}"
+ # Results would look like:
+ "message": "Run Completed [{'project': 'my-project', 'name': 'my-function', 'host': <run-host>,
+ 'status': {'state': 'completed', 'results': <run-results>}}]"
+ **verify_ssl** -
+ (bool) Whether SSL certificates are validated during HTTP requests or not.
+ The default is set to True.\n
+ """


  class NotificationSeverity(mlrun.common.types.StrEnum):
@@ -50,15 +87,35 @@ class NotificationLimits(enum.Enum):


  class Notification(pydantic.BaseModel):
+ """
+ Notification object schema
+
+ :param kind: notification implementation kind - slack, webhook, etc.
+ :param name: for logging and identification
+ :param message: message content in the notification
+ :param severity: severity to display in the notification
+ :param when: list of statuses to trigger the notification: 'running', 'completed', 'error'
+ :param condition: optional condition to trigger the notification, a jinja2 expression that can use run data
+ to evaluate if the notification should be sent in addition to the 'when' statuses.
+ e.g.: '{{ run["status"]["results"]["accuracy"] < 0.9}}'
+ :param params: Implementation specific parameters for the notification implementation (e.g. slack webhook url,
+ git repository details, etc.)
+ :param secret_params: secret parameters for the notification implementation, same as params but will be stored
+ in a k8s secret and passed as a secret reference to the implementation.
+ :param status: notification status - pending, sent, error
+ :param sent_time: time the notification was sent
+ :param reason: failure reason if the notification failed to send
+ """
+
  kind: NotificationKind
  name: str
- message: str
- severity: NotificationSeverity
- when: list[str]
- condition: str
- params: dict[str, typing.Any] = None
- status: NotificationStatus = None
- sent_time: typing.Union[str, datetime.datetime] = None
+ message: typing.Optional[str] = None
+ severity: typing.Optional[NotificationSeverity] = None
+ when: typing.Optional[list[str]] = None
+ condition: typing.Optional[str] = None
+ params: typing.Optional[dict[str, typing.Any]] = None
+ status: typing.Optional[NotificationStatus] = None
+ sent_time: typing.Optional[typing.Union[str, datetime.datetime]] = None
  secret_params: typing.Optional[dict[str, typing.Any]] = None
  reason: typing.Optional[str] = None

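Together with the new docstrings, the relaxed schema means only `kind` and `name` are required when creating a notification; everything else, including the kind-specific `params`, is optional. A hedged sketch of a webhook notification built against this schema (URL and parameter values are illustrative):

    from mlrun.common.schemas.notification import Notification, NotificationKind

    # Only `kind` and `name` are mandatory now; the webhook params follow the fields
    # documented in the NotificationKind.webhook docstring above (values are made up).
    notification = Notification(
        kind=NotificationKind.webhook,
        name="on-failure",
        when=["error"],
        params={
            "url": "https://example.com/mlrun-hook",  # hypothetical endpoint
            "method": "POST",
            "override_body": {"message": "Run Completed {{ runs }}"},
            "verify_ssl": True,
        },
    )
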
mlrun/common/schemas/project.py CHANGED
@@ -100,6 +100,29 @@ class ProjectSpec(pydantic.BaseModel):
  extra = pydantic.Extra.allow


+ class ProjectSpecOut(pydantic.BaseModel):
+ description: typing.Optional[str] = None
+ owner: typing.Optional[str] = None
+ goals: typing.Optional[str] = None
+ params: typing.Optional[dict] = {}
+ functions: typing.Optional[list] = []
+ workflows: typing.Optional[list] = []
+ artifacts: typing.Optional[list] = []
+ artifact_path: typing.Optional[str] = None
+ conda: typing.Optional[str] = None
+ source: typing.Optional[str] = None
+ subpath: typing.Optional[str] = None
+ origin_url: typing.Optional[str] = None
+ desired_state: typing.Optional[ProjectDesiredState] = ProjectDesiredState.online
+ custom_packagers: typing.Optional[list[tuple[str, bool]]] = None
+ default_image: typing.Optional[str] = None
+ build: typing.Any = None
+ default_function_node_selector: typing.Optional[dict] = {}
+
+ class Config:
+ extra = pydantic.Extra.allow
+
+
  class Project(pydantic.BaseModel):
  kind: ObjectKind = pydantic.Field(ObjectKind.project, const=True)
  metadata: ProjectMetadata
@@ -107,6 +130,15 @@ class Project(pydantic.BaseModel):
  status: ObjectStatus = ObjectStatus()


+ # The reason we have a different schema for the response model is that we don't want to validate project.spec.build in
+ # the response as the validation was added late and there may be corrupted values in the DB.
+ class ProjectOut(pydantic.BaseModel):
+ kind: ObjectKind = pydantic.Field(ObjectKind.project, const=True)
+ metadata: ProjectMetadata
+ spec: ProjectSpecOut = ProjectSpecOut()
+ status: ObjectStatus = ObjectStatus()
+
+
  class ProjectOwner(pydantic.BaseModel):
  username: str
  access_key: str
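
The comment in the hunk above explains the intent: responses are parsed with `ProjectOut`/`ProjectSpecOut`, whose `build` field is `typing.Any`, so legacy or corrupted `spec.build` values stored in the DB no longer fail response validation. A rough sketch of the effect, with made-up field values:

    from mlrun.common.schemas.project import ProjectOut

    # A stored record whose spec.build may not satisfy the request-side ProjectSpec
    # validation; parsing it into the response model still succeeds because
    # ProjectSpecOut types build as Any.
    record = {
        "metadata": {"name": "my-project"},
        "spec": {"build": "corrupted-legacy-value", "source": "git://example/repo.git"},
    }
    project = ProjectOut.parse_obj(record)
    print(project.spec.build)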
@@ -114,18 +146,19 @@ class ProjectOwner(pydantic.BaseModel):

  class ProjectSummary(pydantic.BaseModel):
  name: str
- files_count: int
- feature_sets_count: int
- models_count: int
- runs_completed_recent_count: int
- runs_failed_recent_count: int
- runs_running_count: int
- distinct_schedules_count: int
- distinct_scheduled_jobs_pending_count: int
- distinct_scheduled_pipelines_pending_count: int
+ files_count: int = 0
+ feature_sets_count: int = 0
+ models_count: int = 0
+ runs_completed_recent_count: int = 0
+ runs_failed_recent_count: int = 0
+ runs_running_count: int = 0
+ distinct_schedules_count: int = 0
+ distinct_scheduled_jobs_pending_count: int = 0
+ distinct_scheduled_pipelines_pending_count: int = 0
  pipelines_completed_recent_count: typing.Optional[int] = None
  pipelines_failed_recent_count: typing.Optional[int] = None
  pipelines_running_count: typing.Optional[int] = None
+ updated: typing.Optional[datetime.datetime] = None


  class IguazioProject(pydantic.BaseModel):
@@ -133,16 +166,16 @@ class IguazioProject(pydantic.BaseModel):


  # The format query param controls the project type used:
- # full - Project
+ # full - ProjectOut
  # name_only - str
  # summary - ProjectSummary
  # leader - currently only IguazioProject supported
  # The way pydantic handles typing.Union is that it takes the object and tries to coerce it to be the types of the
- # union by the definition order. Therefore we can't currently add generic dict for all leader formats, but we need
+ # union by the definition order. Therefore, we can't currently add generic dict for all leader formats, but we need
  # to add a specific classes for them. it's frustrating but couldn't find other workaround, see:
  # https://github.com/samuelcolvin/pydantic/issues/1423, https://github.com/samuelcolvin/pydantic/issues/619
  ProjectOutput = typing.TypeVar(
- "ProjectOutput", Project, str, ProjectSummary, IguazioProject
+ "ProjectOutput", ProjectOut, str, ProjectSummary, IguazioProject
  )

mlrun/common/schemas/workflow.py CHANGED
@@ -16,8 +16,9 @@ import typing

  import pydantic

- from .notification import Notification
- from .schedule import ScheduleCronTrigger
+ from mlrun.common.schemas.notification import Notification
+ from mlrun.common.schemas.schedule import ScheduleCronTrigger
+ from mlrun.common.types import StrEnum


  class WorkflowSpec(pydantic.BaseModel):
@@ -32,6 +33,7 @@ class WorkflowSpec(pydantic.BaseModel):
  schedule: typing.Union[str, ScheduleCronTrigger] = None
  run_local: typing.Optional[bool] = None
  image: typing.Optional[str] = None
+ workflow_runner_node_selector: typing.Optional[dict[str, str]] = None


  class WorkflowRequest(pydantic.BaseModel):
@@ -54,3 +56,9 @@ class WorkflowResponse(pydantic.BaseModel):

  class GetWorkflowResponse(pydantic.BaseModel):
  workflow_id: str = None
+
+
+ class EngineType(StrEnum):
+ LOCAL = "local"
+ REMOTE = "remote"
+ KFP = "kfp"
mlrun/common/types.py CHANGED
@@ -30,6 +30,7 @@ class HTTPMethod(StrEnum):
  GET = "GET"
  POST = "POST"
  DELETE = "DELETE"
+ PATCH = "PATCH"


  class Operation(StrEnum):