mlrun 1.7.0rc17__py3-none-any.whl → 1.7.0rc19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. See the registry's advisory page for more details.

Files changed (90)
  1. mlrun/__main__.py +5 -2
  2. mlrun/alerts/alert.py +1 -1
  3. mlrun/artifacts/manager.py +5 -1
  4. mlrun/common/constants.py +64 -3
  5. mlrun/common/formatters/__init__.py +16 -0
  6. mlrun/common/formatters/base.py +59 -0
  7. mlrun/common/formatters/function.py +41 -0
  8. mlrun/common/runtimes/constants.py +32 -4
  9. mlrun/common/schemas/__init__.py +1 -2
  10. mlrun/common/schemas/alert.py +31 -9
  11. mlrun/common/schemas/api_gateway.py +52 -0
  12. mlrun/common/schemas/client_spec.py +1 -0
  13. mlrun/common/schemas/frontend_spec.py +1 -0
  14. mlrun/common/schemas/function.py +4 -0
  15. mlrun/common/schemas/model_monitoring/__init__.py +9 -4
  16. mlrun/common/schemas/model_monitoring/constants.py +22 -8
  17. mlrun/common/schemas/model_monitoring/grafana.py +9 -5
  18. mlrun/common/schemas/model_monitoring/model_endpoints.py +17 -6
  19. mlrun/config.py +9 -2
  20. mlrun/data_types/to_pandas.py +5 -5
  21. mlrun/datastore/datastore.py +6 -2
  22. mlrun/datastore/redis.py +2 -2
  23. mlrun/datastore/s3.py +5 -0
  24. mlrun/datastore/sources.py +106 -7
  25. mlrun/datastore/store_resources.py +5 -1
  26. mlrun/datastore/targets.py +5 -4
  27. mlrun/datastore/utils.py +42 -0
  28. mlrun/db/base.py +5 -1
  29. mlrun/db/httpdb.py +22 -3
  30. mlrun/db/nopdb.py +5 -1
  31. mlrun/errors.py +6 -0
  32. mlrun/execution.py +16 -6
  33. mlrun/feature_store/ingestion.py +7 -6
  34. mlrun/feature_store/retrieval/conversion.py +5 -5
  35. mlrun/feature_store/retrieval/job.py +7 -3
  36. mlrun/feature_store/retrieval/spark_merger.py +2 -1
  37. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -2
  38. mlrun/frameworks/parallel_coordinates.py +2 -1
  39. mlrun/frameworks/tf_keras/__init__.py +4 -1
  40. mlrun/launcher/client.py +4 -2
  41. mlrun/launcher/local.py +8 -2
  42. mlrun/launcher/remote.py +8 -2
  43. mlrun/model.py +5 -1
  44. mlrun/model_monitoring/db/stores/__init__.py +0 -2
  45. mlrun/model_monitoring/db/stores/base/store.py +16 -4
  46. mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +43 -21
  47. mlrun/model_monitoring/db/stores/sqldb/models/base.py +32 -2
  48. mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +25 -5
  49. mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +5 -0
  50. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +235 -166
  51. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +190 -91
  52. mlrun/model_monitoring/db/tsdb/__init__.py +35 -6
  53. mlrun/model_monitoring/db/tsdb/base.py +232 -38
  54. mlrun/model_monitoring/db/tsdb/helpers.py +30 -0
  55. mlrun/model_monitoring/db/tsdb/tdengine/__init__.py +15 -0
  56. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +240 -0
  57. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +45 -0
  58. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +397 -0
  59. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +292 -104
  60. mlrun/model_monitoring/helpers.py +45 -0
  61. mlrun/model_monitoring/stream_processing.py +7 -4
  62. mlrun/model_monitoring/writer.py +50 -20
  63. mlrun/package/utils/_formatter.py +2 -2
  64. mlrun/projects/operations.py +8 -5
  65. mlrun/projects/pipelines.py +42 -15
  66. mlrun/projects/project.py +55 -14
  67. mlrun/render.py +8 -5
  68. mlrun/runtimes/base.py +2 -1
  69. mlrun/runtimes/databricks_job/databricks_wrapper.py +1 -1
  70. mlrun/runtimes/local.py +4 -1
  71. mlrun/runtimes/nuclio/api_gateway.py +32 -8
  72. mlrun/runtimes/nuclio/application/application.py +3 -3
  73. mlrun/runtimes/nuclio/function.py +1 -4
  74. mlrun/runtimes/utils.py +5 -6
  75. mlrun/serving/server.py +2 -1
  76. mlrun/utils/async_http.py +25 -5
  77. mlrun/utils/helpers.py +28 -7
  78. mlrun/utils/logger.py +28 -1
  79. mlrun/utils/notifications/notification/__init__.py +14 -9
  80. mlrun/utils/notifications/notification/slack.py +27 -7
  81. mlrun/utils/notifications/notification_pusher.py +47 -42
  82. mlrun/utils/v3io_clients.py +0 -1
  83. mlrun/utils/version/version.json +2 -2
  84. {mlrun-1.7.0rc17.dist-info → mlrun-1.7.0rc19.dist-info}/METADATA +9 -4
  85. {mlrun-1.7.0rc17.dist-info → mlrun-1.7.0rc19.dist-info}/RECORD +89 -82
  86. mlrun/model_monitoring/db/v3io_tsdb_reader.py +0 -134
  87. {mlrun-1.7.0rc17.dist-info → mlrun-1.7.0rc19.dist-info}/LICENSE +0 -0
  88. {mlrun-1.7.0rc17.dist-info → mlrun-1.7.0rc19.dist-info}/WHEEL +0 -0
  89. {mlrun-1.7.0rc17.dist-info → mlrun-1.7.0rc19.dist-info}/entry_points.txt +0 -0
  90. {mlrun-1.7.0rc17.dist-info → mlrun-1.7.0rc19.dist-info}/top_level.txt +0 -0
mlrun/__main__.py CHANGED
@@ -31,6 +31,7 @@ from mlrun_pipelines.mounts import auto_mount as auto_mount_modifier
31
31
  from tabulate import tabulate
32
32
 
33
33
  import mlrun
34
+ import mlrun.common.constants as mlrun_constants
34
35
  import mlrun.common.schemas
35
36
  from mlrun.common.helpers import parse_versioned_object_uri
36
37
 
@@ -256,8 +257,10 @@ def run(
256
257
  runobj.metadata.labels[k] = v
257
258
 
258
259
  if workflow:
259
- runobj.metadata.labels["workflow"] = workflow
260
- runobj.metadata.labels["mlrun/runner-pod"] = socket.gethostname()
260
+ runobj.metadata.labels[mlrun_constants.MLRunInternalLabels.workflow] = workflow
261
+ runobj.metadata.labels[mlrun_constants.MLRunInternalLabels.runner_pod] = (
262
+ socket.gethostname()
263
+ )
261
264
 
262
265
  if db:
263
266
  mlconf.dbpath = db
mlrun/alerts/alert.py CHANGED
@@ -137,7 +137,7 @@ class AlertConfig(ModelObj):
137
137
  template = db.get_alert_template(template)
138
138
 
139
139
  # Extract parameters from the template and apply them to the AlertConfig object
140
- self.description = template.description
140
+ self.summary = template.summary
141
141
  self.severity = template.severity
142
142
  self.criteria = template.criteria
143
143
  self.trigger = template.trigger
@@ -72,6 +72,10 @@ class ArtifactProducer:
72
72
  def get_meta(self) -> dict:
73
73
  return {"kind": self.kind, "name": self.name, "tag": self.tag}
74
74
 
75
+ @property
76
+ def uid(self):
77
+ return None
78
+
75
79
 
76
80
  def dict_to_artifact(struct: dict) -> Artifact:
77
81
  kind = struct.get("kind", "")
@@ -262,7 +266,7 @@ class ArtifactManager:
262
266
  if target_path and item.is_dir and not target_path.endswith("/"):
263
267
  target_path += "/"
264
268
  target_path = template_artifact_path(
265
- artifact_path=target_path, project=producer.project
269
+ artifact_path=target_path, project=producer.project, run_uid=producer.uid
266
270
  )
267
271
  item.target_path = target_path
268
272
 
mlrun/common/constants.py CHANGED
@@ -12,12 +12,73 @@
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
14
  #
15
+
15
16
  IMAGE_NAME_ENRICH_REGISTRY_PREFIX = "." # prefix for image name to enrich with registry
16
- MLRUN_CREATED_LABEL = "mlrun-created"
17
- MLRUN_MODEL_CONF = "model-conf"
18
- MLRUN_SERVING_SPEC_MOUNT_PATH = f"/tmp/mlrun/{MLRUN_MODEL_CONF}"
17
+ MLRUN_SERVING_CONF = "serving-conf"
18
+ MLRUN_SERVING_SPEC_MOUNT_PATH = f"/tmp/mlrun/{MLRUN_SERVING_CONF}"
19
19
  MLRUN_SERVING_SPEC_FILENAME = "serving_spec.json"
20
20
  MLRUN_SERVING_SPEC_PATH = (
21
21
  f"{MLRUN_SERVING_SPEC_MOUNT_PATH}/{MLRUN_SERVING_SPEC_FILENAME}"
22
22
  )
23
+ MLRUN_FUNCTIONS_ANNOTATION = "mlrun/mlrun-functions"
23
24
  MYSQL_MEDIUMBLOB_SIZE_BYTES = 16 * 1024 * 1024
25
+ MLRUN_LABEL_PREFIX = "mlrun/"
26
+ DASK_LABEL_PREFIX = "dask.org/"
27
+ NUCLIO_LABEL_PREFIX = "nuclio.io/"
28
+
29
+
30
+ class MLRunInternalLabels:
31
+ ### dask
32
+ dask_cluster_name = f"{DASK_LABEL_PREFIX}cluster-name"
33
+ dask_component = f"{DASK_LABEL_PREFIX}component"
34
+
35
+ ### spark
36
+ spark_role = "spark-role"
37
+
38
+ ### mpi
39
+ mpi_job_name = "mpi-job-name"
40
+ mpi_job_role = "mpi-job-role"
41
+ mpi_role_type = "mpi_role_type"
42
+
43
+ ### nuclio
44
+ nuclio_project_name = f"{NUCLIO_LABEL_PREFIX}project-name"
45
+ nuclio_class = f"{NUCLIO_LABEL_PREFIX}class"
46
+
47
+ ### mlrun
48
+ mlrun_auth_key = "mlrun-auth-key"
49
+ mlrun_class = f"{MLRUN_LABEL_PREFIX}class"
50
+ client_python_version = f"{MLRUN_LABEL_PREFIX}client_python_version"
51
+ client_version = f"{MLRUN_LABEL_PREFIX}client_version"
52
+ function = f"{MLRUN_LABEL_PREFIX}function"
53
+ job = f"{MLRUN_LABEL_PREFIX}job"
54
+ name = f"{MLRUN_LABEL_PREFIX}name"
55
+ mlrun_owner = f"{MLRUN_LABEL_PREFIX}owner"
56
+ owner_domain = f"{MLRUN_LABEL_PREFIX}owner_domain"
57
+ project = f"{MLRUN_LABEL_PREFIX}project"
58
+ runner_pod = f"{MLRUN_LABEL_PREFIX}runner-pod"
59
+ schedule_name = f"{MLRUN_LABEL_PREFIX}schedule-name"
60
+ scrape_metrics = f"{MLRUN_LABEL_PREFIX}scrape-metrics"
61
+ tag = f"{MLRUN_LABEL_PREFIX}tag"
62
+ uid = f"{MLRUN_LABEL_PREFIX}uid"
63
+ username = f"{MLRUN_LABEL_PREFIX}username"
64
+ username_domain = f"{MLRUN_LABEL_PREFIX}username_domain"
65
+ task_name = f"{MLRUN_LABEL_PREFIX}task-name"
66
+ host = "host"
67
+ job_type = "job-type"
68
+ kind = "kind"
69
+ component = "component"
70
+ resource_name = "resource_name"
71
+ created = "mlrun-created"
72
+
73
+ owner = "owner"
74
+ v3io_user = "v3io_user"
75
+ workflow = "workflow"
76
+ feature_vector = "feature-vector"
77
+
78
+ @classmethod
79
+ def all(cls):
80
+ return [
81
+ value
82
+ for key, value in cls.__dict__.items()
83
+ if not key.startswith("__") and isinstance(value, str)
84
+ ]
@@ -0,0 +1,16 @@
1
+ # Copyright 2024 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ #
15
+
16
+ from .function import FunctionFormat # noqa
@@ -0,0 +1,59 @@
1
+ # Copyright 2024 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ #
15
+
16
+ import typing
17
+
18
+
19
+ class ObjectFormat:
20
+ full = "full"
21
+
22
+ @staticmethod
23
+ def format_method(_format: str) -> typing.Optional[typing.Callable]:
24
+ return {
25
+ ObjectFormat.full: None,
26
+ }[_format]
27
+
28
+ @classmethod
29
+ def format_obj(cls, obj: typing.Any, _format: str) -> typing.Any:
30
+ _format = _format or cls.full
31
+ format_method = cls.format_method(_format)
32
+ if not format_method:
33
+ return obj
34
+
35
+ return format_method(obj)
36
+
37
+ @staticmethod
38
+ def filter_obj_method(_filter: list[list[str]]) -> typing.Callable:
39
+ def _filter_method(obj: dict) -> dict:
40
+ formatted_obj = {}
41
+ for key_list in _filter:
42
+ obj_recursive_iterator = obj
43
+ formatted_obj_recursive_iterator = formatted_obj
44
+ for idx, key in enumerate(key_list):
45
+ if key not in obj_recursive_iterator:
46
+ break
47
+ value = (
48
+ {} if idx < len(key_list) - 1 else obj_recursive_iterator[key]
49
+ )
50
+ formatted_obj_recursive_iterator.setdefault(key, value)
51
+
52
+ obj_recursive_iterator = obj_recursive_iterator[key]
53
+ formatted_obj_recursive_iterator = formatted_obj_recursive_iterator[
54
+ key
55
+ ]
56
+
57
+ return formatted_obj
58
+
59
+ return _filter_method
@@ -0,0 +1,41 @@
1
+ # Copyright 2024 Iguazio
2
+ #
3
+ # Licensed under the Apache License, Version 2.0 (the "License");
4
+ # you may not use this file except in compliance with the License.
5
+ # You may obtain a copy of the License at
6
+ #
7
+ # http://www.apache.org/licenses/LICENSE-2.0
8
+ #
9
+ # Unless required by applicable law or agreed to in writing, software
10
+ # distributed under the License is distributed on an "AS IS" BASIS,
11
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ # See the License for the specific language governing permissions and
13
+ # limitations under the License.
14
+ #
15
+
16
+ import typing
17
+
18
+ import mlrun.common.types
19
+
20
+ from .base import ObjectFormat
21
+
22
+
23
+ class FunctionFormat(ObjectFormat, mlrun.common.types.StrEnum):
24
+ minimal = "minimal"
25
+
26
+ @staticmethod
27
+ def format_method(_format: str) -> typing.Optional[typing.Callable]:
28
+ return {
29
+ FunctionFormat.full: None,
30
+ FunctionFormat.minimal: FunctionFormat.filter_obj_method(
31
+ [
32
+ ["kind"],
33
+ ["metadata"],
34
+ ["status"],
35
+ ["spec", "description"],
36
+ ["spec", "image"],
37
+ ["spec", "default_handler"],
38
+ ["spec", "entry_points"],
39
+ ]
40
+ ),
41
+ }[_format]
@@ -15,6 +15,10 @@
15
15
  import enum
16
16
  import typing
17
17
 
18
+ import mlrun_pipelines.common.models
19
+
20
+ import mlrun.common.constants as mlrun_constants
21
+
18
22
 
19
23
  class PodPhases:
20
24
  """
@@ -122,8 +126,8 @@ class MPIJobCRDVersions:
122
126
  @staticmethod
123
127
  def role_label_by_version(version):
124
128
  return {
125
- MPIJobCRDVersions.v1alpha1: "mpi_role_type",
126
- MPIJobCRDVersions.v1: "mpi-job-role",
129
+ MPIJobCRDVersions.v1alpha1: mlrun_constants.MLRunInternalLabels.mpi_role_type,
130
+ MPIJobCRDVersions.v1: mlrun_constants.MLRunInternalLabels.mpi_job_role,
127
131
  }[version]
128
132
 
129
133
 
@@ -136,6 +140,7 @@ class RunStates:
136
140
  unknown = "unknown"
137
141
  aborted = "aborted"
138
142
  aborting = "aborting"
143
+ skipped = "skipped"
139
144
 
140
145
  @staticmethod
141
146
  def all():
@@ -148,6 +153,7 @@ class RunStates:
148
153
  RunStates.unknown,
149
154
  RunStates.aborted,
150
155
  RunStates.aborting,
156
+ RunStates.skipped,
151
157
  ]
152
158
 
153
159
  @staticmethod
@@ -156,6 +162,7 @@ class RunStates:
156
162
  RunStates.completed,
157
163
  RunStates.error,
158
164
  RunStates.aborted,
165
+ RunStates.skipped,
159
166
  ]
160
167
 
161
168
  @staticmethod
@@ -188,10 +195,31 @@ class RunStates:
188
195
  # TODO: add aborting state once we have it
189
196
  ]
190
197
 
198
+ @staticmethod
199
+ def run_state_to_pipeline_run_status(run_state: str):
200
+ if not run_state:
201
+ return mlrun_pipelines.common.models.RunStatuses.runtime_state_unspecified
191
202
 
203
+ if run_state not in RunStates.all():
204
+ raise ValueError(f"Invalid run state: {run_state}")
205
+
206
+ return {
207
+ RunStates.completed: mlrun_pipelines.common.models.RunStatuses.succeeded,
208
+ RunStates.error: mlrun_pipelines.common.models.RunStatuses.failed,
209
+ RunStates.running: mlrun_pipelines.common.models.RunStatuses.running,
210
+ RunStates.created: mlrun_pipelines.common.models.RunStatuses.pending,
211
+ RunStates.pending: mlrun_pipelines.common.models.RunStatuses.pending,
212
+ RunStates.unknown: mlrun_pipelines.common.models.RunStatuses.runtime_state_unspecified,
213
+ RunStates.aborted: mlrun_pipelines.common.models.RunStatuses.canceled,
214
+ RunStates.aborting: mlrun_pipelines.common.models.RunStatuses.canceling,
215
+ RunStates.skipped: mlrun_pipelines.common.models.RunStatuses.skipped,
216
+ }[run_state]
217
+
218
+
219
+ # TODO: remove this class in 1.9.0 - use only MlrunInternalLabels
192
220
  class RunLabels(enum.Enum):
193
- owner = "owner"
194
- v3io_user = "v3io_user"
221
+ owner = mlrun_constants.MLRunInternalLabels.owner
222
+ v3io_user = mlrun_constants.MLRunInternalLabels.v3io_user
195
223
 
196
224
  @staticmethod
197
225
  def all():
@@ -148,10 +148,9 @@ from .model_monitoring import (
148
148
  ModelMonitoringMode,
149
149
  ModelMonitoringStoreKinds,
150
150
  MonitoringFunctionNames,
151
- MonitoringTSDBTables,
152
151
  PrometheusEndpoints,
153
- TimeSeriesConnector,
154
152
  TSDBTarget,
153
+ V3IOTSDBTables,
155
154
  )
156
155
  from .notification import (
157
156
  Notification,
@@ -22,7 +22,7 @@ from mlrun.common.types import StrEnum
22
22
 
23
23
 
24
24
  class EventEntityKind(StrEnum):
25
- MODEL = "model"
25
+ MODEL_ENDPOINT_RESULT = "model-endpoint-result"
26
26
  JOB = "job"
27
27
 
28
28
 
@@ -33,14 +33,34 @@ class EventEntities(pydantic.BaseModel):
33
33
 
34
34
 
35
35
  class EventKind(StrEnum):
36
- DRIFT_DETECTED = "drift_detected"
37
- DRIFT_SUSPECTED = "drift_suspected"
36
+ DATA_DRIFT_DETECTED = "data_drift_detected"
37
+ DATA_DRIFT_SUSPECTED = "data_drift_suspected"
38
+ CONCEPT_DRIFT_DETECTED = "concept_drift_detected"
39
+ CONCEPT_DRIFT_SUSPECTED = "concept_drift_suspected"
40
+ MODEL_PERFORMANCE_DETECTED = "model_performance_detected"
41
+ MODEL_PERFORMANCE_SUSPECTED = "model_performance_suspected"
42
+ MODEL_SERVING_PERFORMANCE_DETECTED = "model_serving_performance_detected"
43
+ MODEL_SERVING_PERFORMANCE_SUSPECTED = "model_serving_performance_suspected"
44
+ MM_APP_ANOMALY_DETECTED = "mm_app_anomaly_detected"
45
+ MM_APP_ANOMALY_SUSPECTED = "mm_app_anomaly_suspected"
38
46
  FAILED = "failed"
39
47
 
40
48
 
41
49
  _event_kind_entity_map = {
42
- EventKind.DRIFT_SUSPECTED: [EventEntityKind.MODEL],
43
- EventKind.DRIFT_DETECTED: [EventEntityKind.MODEL],
50
+ EventKind.DATA_DRIFT_SUSPECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
51
+ EventKind.DATA_DRIFT_DETECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
52
+ EventKind.CONCEPT_DRIFT_DETECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
53
+ EventKind.CONCEPT_DRIFT_SUSPECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
54
+ EventKind.MODEL_PERFORMANCE_DETECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
55
+ EventKind.MODEL_PERFORMANCE_SUSPECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
56
+ EventKind.MODEL_SERVING_PERFORMANCE_DETECTED: [
57
+ EventEntityKind.MODEL_ENDPOINT_RESULT
58
+ ],
59
+ EventKind.MODEL_SERVING_PERFORMANCE_SUSPECTED: [
60
+ EventEntityKind.MODEL_ENDPOINT_RESULT
61
+ ],
62
+ EventKind.MM_APP_ANOMALY_DETECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
63
+ EventKind.MM_APP_ANOMALY_SUSPECTED: [EventEntityKind.MODEL_ENDPOINT_RESULT],
44
64
  EventKind.FAILED: [EventEntityKind.JOB],
45
65
  }
46
66
 
@@ -123,7 +143,8 @@ class AlertConfig(pydantic.BaseModel):
123
143
  pydantic.Field(
124
144
  description=(
125
145
  "String to be sent in the notifications generated."
126
- "e.g. 'Model {{ $project }}/{{ $entity }} is drifting.'"
146
+ "e.g. 'Model {{project}}/{{entity}} is drifting.'"
147
+ "Supported variables: project, entity, name"
127
148
  )
128
149
  ),
129
150
  ]
@@ -161,8 +182,9 @@ class AlertTemplate(
161
182
  system_generated: bool = False
162
183
 
163
184
  # AlertConfig fields that are pre-defined
164
- description: Optional[str] = (
165
- "String to be sent in the generated notifications e.g. 'Model {{ $project }}/{{ $entity }} is drifting.'"
185
+ summary: Optional[str] = (
186
+ "String to be sent in the generated notifications e.g. 'Model {{project}}/{{entity}} is drifting.'"
187
+ "See AlertConfig.summary description"
166
188
  )
167
189
  severity: AlertSeverity
168
190
  trigger: AlertTrigger
@@ -173,7 +195,7 @@ class AlertTemplate(
173
195
  def templates_differ(self, other):
174
196
  return (
175
197
  self.template_description != other.template_description
176
- or self.description != other.description
198
+ or self.summary != other.summary
177
199
  or self.severity != other.severity
178
200
  or self.trigger != other.trigger
179
201
  or self.reset_policy != other.reset_policy
@@ -18,6 +18,7 @@ from typing import Optional
18
18
  import pydantic
19
19
 
20
20
  import mlrun.common.types
21
+ from mlrun.common.constants import MLRUN_FUNCTIONS_ANNOTATION
21
22
 
22
23
 
23
24
  class APIGatewayAuthenticationMode(mlrun.common.types.StrEnum):
@@ -55,6 +56,7 @@ class APIGatewayMetadata(_APIGatewayBaseModel):
55
56
  name: str
56
57
  namespace: Optional[str]
57
58
  labels: Optional[dict] = {}
59
+ annotations: Optional[dict] = {}
58
60
 
59
61
 
60
62
  class APIGatewayBasicAuth(_APIGatewayBaseModel):
@@ -91,6 +93,56 @@ class APIGateway(_APIGatewayBaseModel):
91
93
  spec: APIGatewaySpec
92
94
  status: Optional[APIGatewayStatus]
93
95
 
96
+ def get_function_names(self):
97
+ return [
98
+ upstream.nucliofunction.get("name")
99
+ for upstream in self.spec.upstreams
100
+ if upstream.nucliofunction.get("name")
101
+ ]
102
+
103
+ def enrich_mlrun_function_names(self):
104
+ upstream_with_nuclio_names = []
105
+ mlrun_function_uris = []
106
+ for upstream in self.spec.upstreams:
107
+ uri = upstream.nucliofunction.get("name")
108
+ project, function_name, tag, _ = (
109
+ mlrun.common.helpers.parse_versioned_object_uri(uri)
110
+ )
111
+ upstream.nucliofunction["name"] = (
112
+ mlrun.runtimes.nuclio.function.get_fullname(function_name, project, tag)
113
+ )
114
+
115
+ upstream_with_nuclio_names.append(upstream)
116
+ mlrun_function_uris.append(uri)
117
+
118
+ self.spec.upstreams = upstream_with_nuclio_names
119
+ if len(mlrun_function_uris) == 1:
120
+ self.metadata.annotations[MLRUN_FUNCTIONS_ANNOTATION] = mlrun_function_uris[
121
+ 0
122
+ ]
123
+ elif len(mlrun_function_uris) == 2:
124
+ self.metadata.annotations[MLRUN_FUNCTIONS_ANNOTATION] = "&".join(
125
+ mlrun_function_uris
126
+ )
127
+ return self
128
+
129
+ def replace_nuclio_names_with_mlrun_uri(self):
130
+ mlrun_functions = self.metadata.annotations.get(MLRUN_FUNCTIONS_ANNOTATION)
131
+ if mlrun_functions:
132
+ mlrun_function_uris = (
133
+ mlrun_functions.split("&")
134
+ if "&" in mlrun_functions
135
+ else [mlrun_functions]
136
+ )
137
+ if len(mlrun_function_uris) != len(self.spec.upstreams):
138
+ raise mlrun.errors.MLRunValueError(
139
+ "Error when translating nuclio names to mlrun names in api gateway:"
140
+ " number of functions doesn't match the mlrun functions in annotation"
141
+ )
142
+ for i in range(len(mlrun_function_uris)):
143
+ self.spec.upstreams[i].nucliofunction["name"] = mlrun_function_uris[i]
144
+ return self
145
+
94
146
 
95
147
  class APIGatewaysOutput(_APIGatewayBaseModel):
96
148
  api_gateways: typing.Optional[dict[str, APIGateway]] = {}
@@ -59,6 +59,7 @@ class ClientSpec(pydantic.BaseModel):
59
59
  sql_url: typing.Optional[str]
60
60
  model_endpoint_monitoring_store_type: typing.Optional[str]
61
61
  model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
62
+ model_monitoring_tsdb_connection: typing.Optional[str]
62
63
  ce: typing.Optional[dict]
63
64
  # not passing them as one object as it possible client user would like to override only one of the params
64
65
  calculate_artifact_hash: typing.Optional[str]
@@ -70,3 +70,4 @@ class FrontendSpec(pydantic.BaseModel):
70
70
  feature_store_data_prefixes: typing.Optional[dict[str, str]]
71
71
  allowed_artifact_path_prefixes_list: list[str]
72
72
  ce: typing.Optional[dict]
73
+ internal_labels: list[str] = []
@@ -45,6 +45,9 @@ class FunctionState:
45
45
  # same goes for the build which is not coming from the pod, but is used and we can't just omit it for BC reasons
46
46
  build = "build"
47
47
 
48
+ # for pipeline steps
49
+ skipped = "skipped"
50
+
48
51
  @classmethod
49
52
  def get_function_state_from_pod_state(cls, pod_state: str):
50
53
  if pod_state == "succeeded":
@@ -60,6 +63,7 @@ class FunctionState:
60
63
  return [
61
64
  cls.ready,
62
65
  cls.error,
66
+ cls.skipped,
63
67
  ]
64
68
 
65
69
 
@@ -11,8 +11,6 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
- #
15
- # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
16
14
 
17
15
  from .constants import (
18
16
  ControllerPolicy,
@@ -30,20 +28,24 @@ from .constants import (
30
28
  ModelMonitoringMode,
31
29
  ModelMonitoringStoreKinds,
32
30
  MonitoringFunctionNames,
33
- MonitoringTSDBTables,
31
+ PredictionsQueryConstants,
34
32
  ProjectSecretKeys,
35
33
  PrometheusEndpoints,
36
34
  PrometheusMetric,
37
35
  ResultData,
36
+ ResultKindApp,
38
37
  SchedulingKeys,
39
- TimeSeriesConnector,
38
+ SpecialApps,
39
+ TDEngineSuperTables,
40
40
  TSDBTarget,
41
+ V3IOTSDBTables,
41
42
  VersionedModel,
42
43
  WriterEvent,
43
44
  WriterEventKind,
44
45
  )
45
46
  from .grafana import (
46
47
  GrafanaColumn,
48
+ GrafanaColumnType,
47
49
  GrafanaDataPoint,
48
50
  GrafanaNumberColumn,
49
51
  GrafanaStringColumn,
@@ -57,7 +59,10 @@ from .model_endpoints import (
57
59
  ModelEndpointList,
58
60
  ModelEndpointMetadata,
59
61
  ModelEndpointMonitoringMetric,
62
+ ModelEndpointMonitoringMetricNoData,
60
63
  ModelEndpointMonitoringMetricType,
64
+ ModelEndpointMonitoringMetricValues,
65
+ ModelEndpointMonitoringResultValues,
61
66
  ModelEndpointSpec,
62
67
  ModelEndpointStatus,
63
68
  )
@@ -81,6 +81,8 @@ class EventFieldType:
81
81
  DRIFT_DETECTED_THRESHOLD = "drift_detected_threshold"
82
82
  POSSIBLE_DRIFT_THRESHOLD = "possible_drift_threshold"
83
83
  SAMPLE_PARQUET_PATH = "sample_parquet_path"
84
+ TIME = "time"
85
+ TABLE_COLUMN = "table_column"
84
86
 
85
87
 
86
88
  class FeatureSetFeatures(MonitoringStrEnum):
@@ -156,10 +158,6 @@ class EventKeyMetrics:
156
158
  REAL_TIME = "real_time"
157
159
 
158
160
 
159
- class TimeSeriesConnector:
160
- TSDB = "tsdb"
161
-
162
-
163
161
  class ModelEndpointTarget:
164
162
  V3IO_NOSQL = "v3io-nosql"
165
163
  SQL = "sql"
@@ -171,6 +169,7 @@ class ProjectSecretKeys:
171
169
  PIPELINES_ACCESS_KEY = "MODEL_MONITORING_PIPELINES_ACCESS_KEY"
172
170
  KAFKA_BROKERS = "KAFKA_BROKERS"
173
171
  STREAM_PATH = "STREAM_PATH"
172
+ TSDB_CONNECTION = "TSDB_CONNECTION"
174
173
 
175
174
 
176
175
  class ModelMonitoringStoreKinds:
@@ -194,6 +193,7 @@ class FileTargetKind:
194
193
  APPS_PARQUET = "apps_parquet"
195
194
  LOG_STREAM = "log_stream"
196
195
  APP_RESULTS = "app_results"
196
+ APP_METRICS = "app_metrics"
197
197
  MONITORING_SCHEDULES = "monitoring_schedules"
198
198
  MONITORING_APPLICATION = "monitoring_application"
199
199
 
@@ -230,12 +230,18 @@ class MonitoringFunctionNames(MonitoringStrEnum):
230
230
  WRITER = "model-monitoring-writer"
231
231
 
232
232
 
233
- class MonitoringTSDBTables(MonitoringStrEnum):
233
+ class V3IOTSDBTables(MonitoringStrEnum):
234
234
  APP_RESULTS = "app-results"
235
235
  METRICS = "metrics"
236
236
  EVENTS = "events"
237
237
 
238
238
 
239
+ class TDEngineSuperTables(MonitoringStrEnum):
240
+ APP_RESULTS = "app_results"
241
+ METRICS = "metrics"
242
+ PREDICTIONS = "predictions"
243
+
244
+
239
245
  @dataclass
240
246
  class FunctionURI:
241
247
  project: str
@@ -312,6 +318,7 @@ class ResultKindApp(Enum):
312
318
  concept_drift = 1
313
319
  model_performance = 2
314
320
  system_performance = 3
321
+ custom = 4
315
322
 
316
323
 
317
324
  class ResultStatusApp(IntEnum):
@@ -339,12 +346,19 @@ class ControllerPolicy:
339
346
 
340
347
  class TSDBTarget:
341
348
  V3IO_TSDB = "v3io-tsdb"
349
+ TDEngine = "tdengine"
342
350
  PROMETHEUS = "prometheus"
343
- APP_RESULTS_TABLE = "app-results"
344
- V3IO_BE = "tsdb"
345
- V3IO_RATE = "1/s"
346
351
 
347
352
 
348
353
  class HistogramDataDriftApplicationConstants:
349
354
  NAME = "histogram-data-drift"
350
355
  GENERAL_RESULT_NAME = "general_drift"
356
+
357
+
358
+ class PredictionsQueryConstants:
359
+ DEFAULT_AGGREGATION_GRANULARITY = "10m"
360
+ INVOCATIONS = "invocations"
361
+
362
+
363
+ class SpecialApps:
364
+ MLRUN_INFRA = "mlrun-infra"