mlrun 1.10.0rc40__py3-none-any.whl → 1.11.0rc16__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (150)
  1. mlrun/__init__.py +3 -2
  2. mlrun/__main__.py +0 -4
  3. mlrun/artifacts/dataset.py +2 -2
  4. mlrun/artifacts/plots.py +1 -1
  5. mlrun/{model_monitoring/db/tsdb/tdengine → auth}/__init__.py +2 -3
  6. mlrun/auth/nuclio.py +89 -0
  7. mlrun/auth/providers.py +429 -0
  8. mlrun/auth/utils.py +415 -0
  9. mlrun/common/constants.py +7 -0
  10. mlrun/common/model_monitoring/helpers.py +41 -4
  11. mlrun/common/runtimes/constants.py +28 -0
  12. mlrun/common/schemas/__init__.py +13 -3
  13. mlrun/common/schemas/alert.py +2 -2
  14. mlrun/common/schemas/api_gateway.py +3 -0
  15. mlrun/common/schemas/auth.py +10 -10
  16. mlrun/common/schemas/client_spec.py +4 -0
  17. mlrun/common/schemas/constants.py +25 -0
  18. mlrun/common/schemas/frontend_spec.py +1 -8
  19. mlrun/common/schemas/function.py +24 -0
  20. mlrun/common/schemas/hub.py +3 -2
  21. mlrun/common/schemas/model_monitoring/__init__.py +1 -1
  22. mlrun/common/schemas/model_monitoring/constants.py +2 -2
  23. mlrun/common/schemas/secret.py +17 -2
  24. mlrun/common/secrets.py +95 -1
  25. mlrun/common/types.py +10 -10
  26. mlrun/config.py +53 -15
  27. mlrun/data_types/infer.py +2 -2
  28. mlrun/datastore/__init__.py +2 -3
  29. mlrun/datastore/base.py +274 -10
  30. mlrun/datastore/datastore.py +1 -1
  31. mlrun/datastore/datastore_profile.py +49 -17
  32. mlrun/datastore/model_provider/huggingface_provider.py +6 -2
  33. mlrun/datastore/model_provider/model_provider.py +2 -2
  34. mlrun/datastore/model_provider/openai_provider.py +2 -2
  35. mlrun/datastore/s3.py +15 -16
  36. mlrun/datastore/sources.py +1 -1
  37. mlrun/datastore/store_resources.py +4 -4
  38. mlrun/datastore/storeytargets.py +16 -10
  39. mlrun/datastore/targets.py +1 -1
  40. mlrun/datastore/utils.py +16 -3
  41. mlrun/datastore/v3io.py +1 -1
  42. mlrun/db/base.py +36 -12
  43. mlrun/db/httpdb.py +316 -101
  44. mlrun/db/nopdb.py +29 -11
  45. mlrun/errors.py +4 -2
  46. mlrun/execution.py +11 -12
  47. mlrun/feature_store/api.py +1 -1
  48. mlrun/feature_store/common.py +1 -1
  49. mlrun/feature_store/feature_vector_utils.py +1 -1
  50. mlrun/feature_store/steps.py +8 -6
  51. mlrun/frameworks/_common/utils.py +3 -3
  52. mlrun/frameworks/_dl_common/loggers/logger.py +1 -1
  53. mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +2 -1
  54. mlrun/frameworks/_ml_common/loggers/mlrun_logger.py +1 -1
  55. mlrun/frameworks/_ml_common/utils.py +2 -1
  56. mlrun/frameworks/auto_mlrun/auto_mlrun.py +4 -3
  57. mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +2 -1
  58. mlrun/frameworks/onnx/dataset.py +2 -1
  59. mlrun/frameworks/onnx/mlrun_interface.py +2 -1
  60. mlrun/frameworks/pytorch/callbacks/logging_callback.py +5 -4
  61. mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +2 -1
  62. mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +2 -1
  63. mlrun/frameworks/pytorch/utils.py +2 -1
  64. mlrun/frameworks/sklearn/metric.py +2 -1
  65. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +5 -4
  66. mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +2 -1
  67. mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +2 -1
  68. mlrun/hub/__init__.py +37 -0
  69. mlrun/hub/base.py +142 -0
  70. mlrun/hub/module.py +67 -76
  71. mlrun/hub/step.py +113 -0
  72. mlrun/launcher/base.py +2 -1
  73. mlrun/launcher/local.py +2 -1
  74. mlrun/model.py +12 -2
  75. mlrun/model_monitoring/__init__.py +0 -1
  76. mlrun/model_monitoring/api.py +2 -2
  77. mlrun/model_monitoring/applications/base.py +20 -6
  78. mlrun/model_monitoring/applications/context.py +1 -0
  79. mlrun/model_monitoring/controller.py +7 -17
  80. mlrun/model_monitoring/db/_schedules.py +2 -16
  81. mlrun/model_monitoring/db/_stats.py +2 -13
  82. mlrun/model_monitoring/db/tsdb/__init__.py +9 -7
  83. mlrun/model_monitoring/db/tsdb/base.py +2 -4
  84. mlrun/model_monitoring/db/tsdb/preaggregate.py +234 -0
  85. mlrun/model_monitoring/db/tsdb/stream_graph_steps.py +63 -0
  86. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_metrics_queries.py +414 -0
  87. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_predictions_queries.py +376 -0
  88. mlrun/model_monitoring/db/tsdb/timescaledb/queries/timescaledb_results_queries.py +590 -0
  89. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connection.py +434 -0
  90. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_connector.py +541 -0
  91. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_operations.py +808 -0
  92. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_schema.py +502 -0
  93. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream.py +163 -0
  94. mlrun/model_monitoring/db/tsdb/timescaledb/timescaledb_stream_graph_steps.py +60 -0
  95. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_dataframe_processor.py +141 -0
  96. mlrun/model_monitoring/db/tsdb/timescaledb/utils/timescaledb_query_builder.py +585 -0
  97. mlrun/model_monitoring/db/tsdb/timescaledb/writer_graph_steps.py +73 -0
  98. mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +4 -6
  99. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +147 -79
  100. mlrun/model_monitoring/features_drift_table.py +2 -1
  101. mlrun/model_monitoring/helpers.py +2 -1
  102. mlrun/model_monitoring/stream_processing.py +18 -16
  103. mlrun/model_monitoring/writer.py +4 -3
  104. mlrun/package/__init__.py +2 -1
  105. mlrun/platforms/__init__.py +0 -44
  106. mlrun/platforms/iguazio.py +1 -1
  107. mlrun/projects/operations.py +11 -10
  108. mlrun/projects/project.py +81 -82
  109. mlrun/run.py +4 -7
  110. mlrun/runtimes/__init__.py +2 -204
  111. mlrun/runtimes/base.py +89 -21
  112. mlrun/runtimes/constants.py +225 -0
  113. mlrun/runtimes/daskjob.py +4 -2
  114. mlrun/runtimes/databricks_job/databricks_runtime.py +2 -1
  115. mlrun/runtimes/mounts.py +5 -0
  116. mlrun/runtimes/nuclio/__init__.py +12 -8
  117. mlrun/runtimes/nuclio/api_gateway.py +36 -6
  118. mlrun/runtimes/nuclio/application/application.py +200 -32
  119. mlrun/runtimes/nuclio/function.py +154 -49
  120. mlrun/runtimes/nuclio/serving.py +55 -42
  121. mlrun/runtimes/pod.py +59 -10
  122. mlrun/secrets.py +46 -2
  123. mlrun/serving/__init__.py +2 -0
  124. mlrun/serving/remote.py +5 -5
  125. mlrun/serving/routers.py +3 -3
  126. mlrun/serving/server.py +46 -43
  127. mlrun/serving/serving_wrapper.py +6 -2
  128. mlrun/serving/states.py +554 -207
  129. mlrun/serving/steps.py +1 -1
  130. mlrun/serving/system_steps.py +42 -33
  131. mlrun/track/trackers/mlflow_tracker.py +29 -31
  132. mlrun/utils/helpers.py +89 -16
  133. mlrun/utils/http.py +9 -2
  134. mlrun/utils/notifications/notification/git.py +1 -1
  135. mlrun/utils/notifications/notification/mail.py +39 -16
  136. mlrun/utils/notifications/notification_pusher.py +2 -2
  137. mlrun/utils/version/version.json +2 -2
  138. mlrun/utils/version/version.py +3 -4
  139. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/METADATA +39 -49
  140. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/RECORD +144 -130
  141. mlrun/db/auth_utils.py +0 -152
  142. mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +0 -343
  143. mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +0 -75
  144. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connection.py +0 -281
  145. mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +0 -1368
  146. mlrun/model_monitoring/db/tsdb/tdengine/writer_graph_steps.py +0 -51
  147. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/WHEEL +0 -0
  148. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/entry_points.txt +0 -0
  149. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/licenses/LICENSE +0 -0
  150. {mlrun-1.10.0rc40.dist-info → mlrun-1.11.0rc16.dist-info}/top_level.txt +0 -0
mlrun/db/auth_utils.py DELETED
@@ -1,152 +0,0 @@
- # Copyright 2024 Iguazio
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- from abc import ABC, abstractmethod
- from datetime import datetime, timedelta
-
- import requests
-
- import mlrun.errors
- from mlrun.utils import logger
-
-
- class TokenProvider(ABC):
-     @abstractmethod
-     def get_token(self):
-         pass
-
-     @abstractmethod
-     def is_iguazio_session(self):
-         pass
-
-
- class StaticTokenProvider(TokenProvider):
-     def __init__(self, token: str):
-         self.token = token
-
-     def get_token(self):
-         return self.token
-
-     def is_iguazio_session(self):
-         return mlrun.platforms.iguazio.is_iguazio_session(self.token)
-
-
- class OAuthClientIDTokenProvider(TokenProvider):
-     def __init__(
-         self, token_endpoint: str, client_id: str, client_secret: str, timeout=5
-     ):
-         if not token_endpoint or not client_id or not client_secret:
-             raise mlrun.errors.MLRunValueError(
-                 "Invalid client_id configuration for authentication. Must provide token endpoint, client-id and secret"
-             )
-         self.token_endpoint = token_endpoint
-         self.client_id = client_id
-         self.client_secret = client_secret
-         self.timeout = timeout
-
-         # Since we're only issuing POST requests, which are actually a disguised GET, then it's ok to allow retries
-         # on them.
-         self._session = mlrun.utils.HTTPSessionWithRetry(
-             retry_on_post=True,
-             verbose=True,
-         )
-
-         self._cleanup()
-         self._refresh_token_if_needed()
-
-     def get_token(self):
-         self._refresh_token_if_needed()
-         return self.token
-
-     def is_iguazio_session(self):
-         return False
-
-     def _cleanup(self):
-         self.token = self.token_expiry_time = self.token_refresh_time = None
-
-     def _refresh_token_if_needed(self):
-         now = datetime.now()
-         if self.token:
-             if self.token_refresh_time and now <= self.token_refresh_time:
-                 return self.token
-
-             # We only cleanup if token was really expired - even if we fail in refreshing the token, we can still
-             # use the existing one given that it's not expired.
-             if now >= self.token_expiry_time:
-                 self._cleanup()
-
-         self._issue_token_request()
-         return self.token
-
-     def _issue_token_request(self, raise_on_error=False):
-         try:
-             headers = {"Content-Type": "application/x-www-form-urlencoded"}
-             request_body = {
-                 "grant_type": "client_credentials",
-                 "client_id": self.client_id,
-                 "client_secret": self.client_secret,
-             }
-             response = self._session.request(
-                 "POST",
-                 self.token_endpoint,
-                 timeout=self.timeout,
-                 headers=headers,
-                 data=request_body,
-             )
-         except requests.RequestException as exc:
-             error = f"Retrieving token failed: {mlrun.errors.err_to_str(exc)}"
-             if raise_on_error:
-                 raise mlrun.errors.MLRunRuntimeError(error) from exc
-             else:
-                 logger.warning(error)
-                 return
-
-         if not response.ok:
-             error = "No error available"
-             if response.content:
-                 try:
-                     data = response.json()
-                     error = data.get("error")
-                 except Exception:
-                     pass
-             logger.warning(
-                 "Retrieving token failed", status=response.status_code, error=error
-             )
-             if raise_on_error:
-                 mlrun.errors.raise_for_status(response)
-             return
-
-         self._parse_response(response.json())
-
-     def _parse_response(self, data: dict):
-         # Response is described in https://datatracker.ietf.org/doc/html/rfc6749#section-4.4.3
-         # According to spec, there isn't a refresh token - just the access token and its expiry time (in seconds).
-         self.token = data.get("access_token")
-         expires_in = data.get("expires_in")
-         if not self.token or not expires_in:
-             token_str = "****" if self.token else "missing"
-             logger.warning(
-                 "Failed to parse token response", token=token_str, expires_in=expires_in
-             )
-             return
-
-         now = datetime.now()
-         self.token_expiry_time = now + timedelta(seconds=expires_in)
-         self.token_refresh_time = now + timedelta(seconds=expires_in / 2)
-         logger.info(
-             "Successfully retrieved client-id token",
-             expires_in=expires_in,
-             expiry=str(self.token_expiry_time),
-             refresh=str(self.token_refresh_time),
-         )
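
Note: the deleted OAuthClientIDTokenProvider cached an OAuth2 client-credentials token and scheduled a refresh at half of its lifetime; judging by the file list above, token-provider logic now lives under the new mlrun/auth package (providers.py, utils.py). For orientation only, a minimal sketch of the underlying token request (RFC 6749, section 4.4); the endpoint and credentials are placeholders, not mlrun API:

    # Hypothetical sketch, not mlrun code: issue a client-credentials token
    # request and compute expiry/refresh times the way the deleted provider did.
    from datetime import datetime, timedelta

    import requests

    def fetch_client_credentials_token(token_endpoint, client_id, client_secret, timeout=5):
        response = requests.post(
            token_endpoint,
            headers={"Content-Type": "application/x-www-form-urlencoded"},
            data={
                "grant_type": "client_credentials",
                "client_id": client_id,
                "client_secret": client_secret,
            },
            timeout=timeout,
        )
        response.raise_for_status()
        data = response.json()
        # Per the RFC there is no refresh token here, only the access token
        # and its lifetime in seconds.
        expires_in = data["expires_in"]
        now = datetime.now()
        return {
            "token": data["access_token"],
            "expiry": now + timedelta(seconds=expires_in),
            "refresh_at": now + timedelta(seconds=expires_in / 2),
        }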
mlrun/model_monitoring/db/tsdb/tdengine/schemas.py DELETED
@@ -1,343 +0,0 @@
- # Copyright 2024 Iguazio
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- import datetime
- from dataclasses import dataclass
- from io import StringIO
- from typing import Optional, Union
-
- import taosws
-
- import mlrun.common.schemas.model_monitoring as mm_schemas
- import mlrun.common.types
-
- _MODEL_MONITORING_DATABASE = "mlrun_model_monitoring"
-
-
- class _TDEngineColumnType:
-     def __init__(self, data_type: str, length: Optional[int] = None):
-         self.data_type = data_type
-         self.length = length
-
-     def values_to_column(self, values):
-         raise NotImplementedError()
-
-     def __str__(self):
-         if self.length is not None:
-             return f"{self.data_type}({self.length})"
-         else:
-             return self.data_type
-
-
- class _TDEngineColumn(mlrun.common.types.StrEnum):
-     TIMESTAMP = _TDEngineColumnType("TIMESTAMP")
-     FLOAT = _TDEngineColumnType("FLOAT")
-     INT = _TDEngineColumnType("INT")
-     BINARY_40 = _TDEngineColumnType("BINARY", 40)
-     BINARY_64 = _TDEngineColumnType("BINARY", 64)
-     BINARY_1000 = _TDEngineColumnType("BINARY", 1000)
-
-
- def values_to_column(values, column_type):
-     if column_type == _TDEngineColumn.TIMESTAMP:
-         timestamps = [round(timestamp.timestamp() * 1000) for timestamp in values]
-         return taosws.millis_timestamps_to_column(timestamps)
-     if column_type == _TDEngineColumn.FLOAT:
-         return taosws.floats_to_column(values)
-     if column_type == _TDEngineColumn.INT:
-         return taosws.ints_to_column(values)
-     if column_type == _TDEngineColumn.BINARY_40:
-         return taosws.binary_to_column(values)
-     if column_type == _TDEngineColumn.BINARY_64:
-         return taosws.binary_to_column(values)
-     if column_type == _TDEngineColumn.BINARY_1000:
-         return taosws.binary_to_column(values)
-
-     raise mlrun.errors.MLRunInvalidArgumentError(
-         f"unsupported column type '{column_type}'"
-     )
-
-
- @dataclass
- class TDEngineSchema:
-     """
-     A class to represent a supertable schema in TDengine. Using this schema, you can generate the relevant queries to
-     create, insert, delete and query data from TDengine. At the moment, there are 3 schemas: AppResultTable,
-     Metrics, and Predictions.
-     """
-
-     def __init__(
-         self,
-         super_table: str,
-         columns: dict[str, _TDEngineColumn],
-         tags: dict[str, str],
-         project: str,
-         database: Optional[str] = None,
-     ):
-         self.super_table = f"{super_table}_{project.replace('-', '_')}"
-         self.columns = columns
-         self.tags = tags
-         self.database = database or _MODEL_MONITORING_DATABASE
-
-     def _create_super_table_query(self) -> str:
-         columns = ", ".join(f"{col} {val}" for col, val in self.columns.items())
-         tags = ", ".join(f"{col} {val}" for col, val in self.tags.items())
-         return f"CREATE STABLE if NOT EXISTS {self.database}.{self.super_table} ({columns}) TAGS ({tags});"
-
-     def _create_subtable_sql(
-         self,
-         subtable: str,
-         values: dict[str, Union[str, int, float, datetime.datetime]],
-     ) -> str:
-         try:
-             tags = ", ".join(f"'{values[val]}'" for val in self.tags)
-         except KeyError:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 f"values must contain all tags: {self.tags.keys()}"
-             )
-         return f"CREATE TABLE if NOT EXISTS {self.database}.{subtable} USING {self.super_table} TAGS ({tags});"
-
-     def _delete_subtable_query(
-         self,
-         subtable: str,
-         values: dict[str, Union[str, int, float, datetime.datetime]],
-     ) -> str:
-         values = " AND ".join(
-             f"{val} LIKE '{values[val]}'" for val in self.tags if val in values
-         )
-         if not values:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 f"values must contain at least one tag: {self.tags.keys()}"
-             )
-         return f"DELETE FROM {self.database}.{subtable} WHERE {values};"
-
-     def drop_subtable_query(self, subtable: str) -> str:
-         return f"DROP TABLE if EXISTS {self.database}.`{subtable}`;"
-
-     def drop_supertable_query(self) -> str:
-         return f"DROP STABLE if EXISTS {self.database}.{self.super_table};"
-
-     def _get_subtables_query_by_tag(
-         self,
-         filter_tag: str,
-         filter_values: list[str],
-         operator: str = "OR",
-     ) -> str:
-         if filter_tag not in self.tags:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 f"`filter_tag` must be one of the tags: {self.tags.keys()}"
-             )
-
-         values = f" {operator} ".join(
-             f"{filter_tag} LIKE '{val}'" for val in filter_values
-         )
-         return self._get_tables_query_by_condition(values)
-
-     def _get_tables_query_by_condition(self, condition: str) -> str:
-         return f"SELECT DISTINCT TBNAME FROM {self.database}.{self.super_table} WHERE {condition};"
-
-     @staticmethod
-     def _get_records_query(
-         table: str,
-         start: datetime.datetime,
-         end: datetime.datetime,
-         columns_to_filter: Optional[list[str]] = None,
-         filter_query: Optional[str] = None,
-         interval: Optional[str] = None,
-         limit: int = 0,
-         agg_funcs: Optional[list] = None,
-         sliding_window_step: Optional[str] = None,
-         timestamp_column: str = "time",
-         database: str = _MODEL_MONITORING_DATABASE,
-         group_by: Optional[Union[list[str], str]] = None,
-         preform_agg_funcs_columns: Optional[list[str]] = None,
-         order_by: Optional[str] = None,
-         desc: Optional[bool] = None,
-         partition_by: Optional[str] = None,
-     ) -> str:
-         if agg_funcs and not columns_to_filter:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "`columns_to_filter` must be provided when using aggregate functions"
-             )
-
-         # if aggregate function or interval is provided, the other must be provided as well
-         if interval and not agg_funcs:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "`agg_funcs` must be provided when using interval"
-             )
-         if partition_by and not agg_funcs:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "`agg_funcs` must be provided when using partition by"
-             )
-         if sliding_window_step and not interval:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "`interval` must be provided when using sliding window"
-             )
-         if group_by and not agg_funcs:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "aggregate functions must be provided when using group by"
-             )
-         if desc and not order_by:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "`order_by` must be provided when using descending"
-             )
-
-         with StringIO() as query:
-             query.write("SELECT ")
-             if interval:
-                 query.write("_wstart, _wend, ")
-             if agg_funcs:
-                 preform_agg_funcs_columns = (
-                     columns_to_filter
-                     if preform_agg_funcs_columns is None
-                     else preform_agg_funcs_columns
-                 )
-                 query.write(
-                     ", ".join(
-                         [
-                             f"{a}({col})"
-                             if col.upper()
-                             in map(
-                                 str.upper, preform_agg_funcs_columns
-                             )  # Case-insensitive check
-                             else f"{col}"
-                             for a in agg_funcs
-                             for col in columns_to_filter
-                         ]
-                     )
-                 )
-             elif columns_to_filter:
-                 query.write(", ".join(columns_to_filter))
-             else:
-                 query.write("*")
-             query.write(f" FROM {database}.{table}")
-
-             if any([filter_query, start, end]):
-                 query.write(" WHERE ")
-                 if filter_query:
-                     query.write(f"{filter_query} AND ")
-                 if start:
-                     query.write(f"{timestamp_column} >= '{start}' AND ")
-                 if end:
-                     query.write(f"{timestamp_column} <= '{end}'")
-             if group_by:
-                 if isinstance(group_by, list):
-                     group_by = ", ".join(group_by)
-                 query.write(f" GROUP BY {group_by}")
-             if partition_by:
-                 query.write(f" PARTITION BY {partition_by}")
-             if order_by:
-                 desc = " DESC" if desc else ""
-                 query.write(f" ORDER BY {order_by}{desc}")
-             if interval:
-                 query.write(f" INTERVAL({interval})")
-             if sliding_window_step:
-                 query.write(f" SLIDING({sliding_window_step})")
-             if limit:
-                 query.write(f" LIMIT {limit}")
-             query.write(";")
-             return query.getvalue()
-
-
- @dataclass
- class AppResultTable(TDEngineSchema):
-     def __init__(self, project: str, database: Optional[str] = None):
-         super_table = mm_schemas.TDEngineSuperTables.APP_RESULTS
-         columns = {
-             mm_schemas.WriterEvent.END_INFER_TIME: _TDEngineColumn.TIMESTAMP,
-             mm_schemas.WriterEvent.START_INFER_TIME: _TDEngineColumn.TIMESTAMP,
-             mm_schemas.ResultData.RESULT_VALUE: _TDEngineColumn.FLOAT,
-             mm_schemas.ResultData.RESULT_STATUS: _TDEngineColumn.INT,
-             mm_schemas.ResultData.RESULT_EXTRA_DATA: _TDEngineColumn.BINARY_1000,
-         }
-         tags = {
-             mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
-             mm_schemas.WriterEvent.APPLICATION_NAME: _TDEngineColumn.BINARY_64,
-             mm_schemas.ResultData.RESULT_NAME: _TDEngineColumn.BINARY_64,
-             mm_schemas.ResultData.RESULT_KIND: _TDEngineColumn.INT,
-         }
-         super().__init__(
-             super_table=super_table,
-             columns=columns,
-             tags=tags,
-             database=database,
-             project=project,
-         )
-
-
- @dataclass
- class Metrics(TDEngineSchema):
-     def __init__(self, project: str, database: Optional[str] = None):
-         super_table = mm_schemas.TDEngineSuperTables.METRICS
-         columns = {
-             mm_schemas.WriterEvent.END_INFER_TIME: _TDEngineColumn.TIMESTAMP,
-             mm_schemas.WriterEvent.START_INFER_TIME: _TDEngineColumn.TIMESTAMP,
-             mm_schemas.MetricData.METRIC_VALUE: _TDEngineColumn.FLOAT,
-         }
-         tags = {
-             mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
-             mm_schemas.WriterEvent.APPLICATION_NAME: _TDEngineColumn.BINARY_64,
-             mm_schemas.MetricData.METRIC_NAME: _TDEngineColumn.BINARY_64,
-         }
-         super().__init__(
-             super_table=super_table,
-             columns=columns,
-             tags=tags,
-             database=database,
-             project=project,
-         )
-
-
- @dataclass
- class Predictions(TDEngineSchema):
-     def __init__(self, project: str, database: Optional[str] = None):
-         super_table = mm_schemas.TDEngineSuperTables.PREDICTIONS
-         columns = {
-             mm_schemas.EventFieldType.TIME: _TDEngineColumn.TIMESTAMP,
-             mm_schemas.EventFieldType.LATENCY: _TDEngineColumn.FLOAT,
-             mm_schemas.EventKeyMetrics.CUSTOM_METRICS: _TDEngineColumn.BINARY_1000,
-             mm_schemas.EventFieldType.ESTIMATED_PREDICTION_COUNT: _TDEngineColumn.FLOAT,
-             mm_schemas.EventFieldType.EFFECTIVE_SAMPLE_COUNT: _TDEngineColumn.INT,
-         }
-         tags = {
-             mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
-         }
-         super().__init__(
-             super_table=super_table,
-             columns=columns,
-             tags=tags,
-             database=database,
-             project=project,
-         )
-
-
- @dataclass
- class Errors(TDEngineSchema):
-     def __init__(self, project: str, database: Optional[str] = None):
-         super_table = mm_schemas.TDEngineSuperTables.ERRORS
-         columns = {
-             mm_schemas.EventFieldType.TIME: _TDEngineColumn.TIMESTAMP,
-             mm_schemas.EventFieldType.MODEL_ERROR: _TDEngineColumn.BINARY_1000,
-         }
-         tags = {
-             mm_schemas.WriterEvent.ENDPOINT_ID: _TDEngineColumn.BINARY_64,
-             mm_schemas.EventFieldType.ERROR_TYPE: _TDEngineColumn.BINARY_64,
-         }
-         super().__init__(
-             super_table=super_table,
-             columns=columns,
-             tags=tags,
-             database=database,
-             project=project,
-         )
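
Note: the schema classes above are plain SQL string builders over TDengine's supertable model (one STABLE per project, with tag columns identifying subtables). A reconstructed illustration of the strings they assembled, assuming TDEngineSuperTables.APP_RESULTS and the column enums resolve to the snake_case names shown (these are not captured outputs):

    import datetime

    # AppResultTable(project="my-proj")._create_super_table_query() would yield roughly:
    create_stable = (
        "CREATE STABLE if NOT EXISTS mlrun_model_monitoring.app_results_my_proj "
        "(end_infer_time TIMESTAMP, start_infer_time TIMESTAMP, result_value FLOAT, "
        "result_status INT, result_extra_data BINARY(1000)) "
        "TAGS (endpoint_id BINARY(64), application_name BINARY(64), "
        "result_name BINARY(64), result_kind INT);"
    )

    # _get_records_query(table="app_results_my_proj",
    #                    start=datetime.datetime(2025, 1, 1),
    #                    end=datetime.datetime(2025, 1, 2),
    #                    columns_to_filter=["result_value"],
    #                    agg_funcs=["avg"], interval="10m") would yield roughly:
    windowed_select = (
        "SELECT _wstart, _wend, avg(result_value) "
        "FROM mlrun_model_monitoring.app_results_my_proj "
        "WHERE time >= '2025-01-01 00:00:00' AND time <= '2025-01-02 00:00:00'"
        " INTERVAL(10m);"
    )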
mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py DELETED
@@ -1,75 +0,0 @@
- # Copyright 2024 Iguazio
- #
- # Licensed under the Apache License, Version 2.0 (the "License");
- # you may not use this file except in compliance with the License.
- # You may obtain a copy of the License at
- #
- #   http://www.apache.org/licenses/LICENSE-2.0
- #
- # Unless required by applicable law or agreed to in writing, software
- # distributed under the License is distributed on an "AS IS" BASIS,
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- # See the License for the specific language governing permissions and
- # limitations under the License.
-
- import json
- from datetime import datetime
-
- import mlrun.feature_store.steps
- from mlrun.common.schemas.model_monitoring import (
-     EventFieldType,
-     EventKeyMetrics,
- )
- from mlrun.utils import logger
-
-
- class ProcessBeforeTDEngine(mlrun.feature_store.steps.MapClass):
-     def __init__(self, **kwargs):
-         """
-         Process the data before writing to TDEngine. This step create the relevant keys for the TDEngine table,
-         including project name, custom metrics, time column, and table name column.
-
-         :returns: Event as a dictionary which will be written into the TDEngine Predictions table.
-         """
-         super().__init__(**kwargs)
-
-     def do(self, event):
-         event[EventFieldType.PROJECT] = event[EventFieldType.FUNCTION_URI].split("/")[0]
-         event[EventKeyMetrics.CUSTOM_METRICS] = json.dumps(
-             event.get(EventFieldType.METRICS, {})
-         )
-         event[EventFieldType.TIME] = event.get(EventFieldType.TIMESTAMP)
-         event[EventFieldType.TABLE_COLUMN] = "_" + event.get(EventFieldType.ENDPOINT_ID)
-
-         return event
-
-
- class ErrorExtractor(mlrun.feature_store.steps.MapClass):
-     def __init__(self, **kwargs):
-         """
-         Prepare the event for insertion into the TDEngine error table
-         """
-         super().__init__(**kwargs)
-
-     def do(self, event):
-         error = str(event.get("error"))
-         if len(error) > 1000:
-             error = error[-1000:]
-             logger.warning(
-                 f"Error message exceeds 1000 chars: The error message writen to TSDB will be it last "
-                 f"1000 chars, Error: {error}",
-                 event=event,
-             )
-         timestamp = datetime.fromisoformat(event.get("when"))
-         endpoint_id = event[EventFieldType.ENDPOINT_ID]
-         event = {
-             EventFieldType.MODEL_ERROR: error,
-             EventFieldType.ERROR_TYPE: EventFieldType.INFER_ERROR,
-             EventFieldType.ENDPOINT_ID: endpoint_id,
-             EventFieldType.TIME: timestamp,
-             EventFieldType.PROJECT: event[EventFieldType.FUNCTION_URI].split("/")[0],
-             EventFieldType.TABLE_COLUMN: "_err_"
-             + event.get(EventFieldType.ENDPOINT_ID),
-         }
-         logger.info("Write error to errors TSDB table", event=event)
-         return event
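
Note: both steps above are simple dict-to-dict transforms over monitoring stream events. A hypothetical standalone rendering of the ErrorExtractor logic, with the EventFieldType constants spelled out under the assumption that they resolve to the snake_case strings shown (including the value of INFER_ERROR):

    from datetime import datetime

    def extract_error_event(event: dict) -> dict:
        # Reshape a serving error event for the (removed) TDengine errors table.
        error = str(event.get("error"))
        if len(error) > 1000:
            # The model_error column was BINARY(1000): keep only the last 1000 chars.
            error = error[-1000:]
        return {
            "model_error": error,
            "error_type": "infer_error",  # assumed value of EventFieldType.INFER_ERROR
            "endpoint_id": event["endpoint_id"],
            "time": datetime.fromisoformat(event["when"]),
            "project": event["function_uri"].split("/")[0],
            "table_column": "_err_" + event["endpoint_id"],
        }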