mlrun 1.7.1rc4__py3-none-any.whl → 1.8.0rc8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/__init__.py +23 -21
- mlrun/__main__.py +3 -3
- mlrun/alerts/alert.py +148 -14
- mlrun/artifacts/__init__.py +1 -2
- mlrun/artifacts/base.py +46 -12
- mlrun/artifacts/dataset.py +16 -16
- mlrun/artifacts/document.py +334 -0
- mlrun/artifacts/manager.py +15 -13
- mlrun/artifacts/model.py +66 -53
- mlrun/common/constants.py +7 -0
- mlrun/common/formatters/__init__.py +1 -0
- mlrun/common/formatters/feature_set.py +1 -0
- mlrun/common/formatters/function.py +1 -0
- mlrun/{model_monitoring/db/stores/base/__init__.py → common/formatters/model_endpoint.py} +16 -1
- mlrun/common/formatters/pipeline.py +1 -2
- mlrun/common/formatters/project.py +9 -0
- mlrun/common/model_monitoring/__init__.py +0 -5
- mlrun/common/model_monitoring/helpers.py +1 -29
- mlrun/common/runtimes/constants.py +1 -2
- mlrun/common/schemas/__init__.py +6 -2
- mlrun/common/schemas/alert.py +111 -19
- mlrun/common/schemas/api_gateway.py +3 -3
- mlrun/common/schemas/artifact.py +11 -7
- mlrun/common/schemas/auth.py +6 -4
- mlrun/common/schemas/background_task.py +7 -7
- mlrun/common/schemas/client_spec.py +2 -3
- mlrun/common/schemas/clusterization_spec.py +2 -2
- mlrun/common/schemas/common.py +53 -3
- mlrun/common/schemas/constants.py +15 -0
- mlrun/common/schemas/datastore_profile.py +1 -1
- mlrun/common/schemas/feature_store.py +9 -9
- mlrun/common/schemas/frontend_spec.py +4 -4
- mlrun/common/schemas/function.py +10 -10
- mlrun/common/schemas/hub.py +1 -1
- mlrun/common/schemas/k8s.py +3 -3
- mlrun/common/schemas/memory_reports.py +3 -3
- mlrun/common/schemas/model_monitoring/__init__.py +2 -1
- mlrun/common/schemas/model_monitoring/constants.py +66 -14
- mlrun/common/schemas/model_monitoring/grafana.py +1 -1
- mlrun/common/schemas/model_monitoring/model_endpoints.py +91 -147
- mlrun/common/schemas/notification.py +24 -3
- mlrun/common/schemas/object.py +1 -1
- mlrun/common/schemas/pagination.py +4 -4
- mlrun/common/schemas/partition.py +137 -0
- mlrun/common/schemas/pipeline.py +2 -2
- mlrun/common/schemas/project.py +25 -17
- mlrun/common/schemas/runs.py +2 -2
- mlrun/common/schemas/runtime_resource.py +5 -5
- mlrun/common/schemas/schedule.py +1 -1
- mlrun/common/schemas/secret.py +1 -1
- mlrun/common/schemas/tag.py +3 -3
- mlrun/common/schemas/workflow.py +5 -5
- mlrun/config.py +67 -10
- mlrun/data_types/__init__.py +0 -2
- mlrun/data_types/infer.py +3 -1
- mlrun/data_types/spark.py +2 -1
- mlrun/datastore/__init__.py +0 -2
- mlrun/datastore/alibaba_oss.py +4 -1
- mlrun/datastore/azure_blob.py +4 -1
- mlrun/datastore/base.py +12 -4
- mlrun/datastore/datastore.py +9 -3
- mlrun/datastore/datastore_profile.py +79 -20
- mlrun/datastore/dbfs_store.py +4 -1
- mlrun/datastore/filestore.py +4 -1
- mlrun/datastore/google_cloud_storage.py +4 -1
- mlrun/datastore/hdfs.py +4 -1
- mlrun/datastore/inmem.py +4 -1
- mlrun/datastore/redis.py +4 -1
- mlrun/datastore/s3.py +4 -1
- mlrun/datastore/sources.py +52 -51
- mlrun/datastore/store_resources.py +0 -2
- mlrun/datastore/targets.py +21 -21
- mlrun/datastore/utils.py +2 -2
- mlrun/datastore/v3io.py +4 -1
- mlrun/datastore/vectorstore.py +194 -0
- mlrun/datastore/wasbfs/fs.py +13 -12
- mlrun/db/base.py +208 -82
- mlrun/db/factory.py +0 -3
- mlrun/db/httpdb.py +1237 -386
- mlrun/db/nopdb.py +201 -74
- mlrun/errors.py +2 -2
- mlrun/execution.py +136 -50
- mlrun/feature_store/__init__.py +0 -2
- mlrun/feature_store/api.py +41 -40
- mlrun/feature_store/common.py +9 -9
- mlrun/feature_store/feature_set.py +20 -18
- mlrun/feature_store/feature_vector.py +27 -24
- mlrun/feature_store/retrieval/base.py +14 -9
- mlrun/feature_store/retrieval/job.py +2 -1
- mlrun/feature_store/steps.py +2 -2
- mlrun/features.py +30 -13
- mlrun/frameworks/__init__.py +1 -2
- mlrun/frameworks/_common/__init__.py +1 -2
- mlrun/frameworks/_common/artifacts_library.py +2 -2
- mlrun/frameworks/_common/mlrun_interface.py +10 -6
- mlrun/frameworks/_common/model_handler.py +29 -27
- mlrun/frameworks/_common/producer.py +3 -1
- mlrun/frameworks/_dl_common/__init__.py +1 -2
- mlrun/frameworks/_dl_common/loggers/__init__.py +1 -2
- mlrun/frameworks/_dl_common/loggers/mlrun_logger.py +4 -4
- mlrun/frameworks/_dl_common/loggers/tensorboard_logger.py +3 -3
- mlrun/frameworks/_ml_common/__init__.py +1 -2
- mlrun/frameworks/_ml_common/loggers/__init__.py +1 -2
- mlrun/frameworks/_ml_common/model_handler.py +21 -21
- mlrun/frameworks/_ml_common/plans/__init__.py +1 -2
- mlrun/frameworks/_ml_common/plans/confusion_matrix_plan.py +3 -1
- mlrun/frameworks/_ml_common/plans/dataset_plan.py +3 -3
- mlrun/frameworks/_ml_common/plans/roc_curve_plan.py +4 -4
- mlrun/frameworks/auto_mlrun/__init__.py +1 -2
- mlrun/frameworks/auto_mlrun/auto_mlrun.py +22 -15
- mlrun/frameworks/huggingface/__init__.py +1 -2
- mlrun/frameworks/huggingface/model_server.py +9 -9
- mlrun/frameworks/lgbm/__init__.py +47 -44
- mlrun/frameworks/lgbm/callbacks/__init__.py +1 -2
- mlrun/frameworks/lgbm/callbacks/logging_callback.py +4 -2
- mlrun/frameworks/lgbm/callbacks/mlrun_logging_callback.py +4 -2
- mlrun/frameworks/lgbm/mlrun_interfaces/__init__.py +1 -2
- mlrun/frameworks/lgbm/mlrun_interfaces/mlrun_interface.py +5 -5
- mlrun/frameworks/lgbm/model_handler.py +15 -11
- mlrun/frameworks/lgbm/model_server.py +11 -7
- mlrun/frameworks/lgbm/utils.py +2 -2
- mlrun/frameworks/onnx/__init__.py +1 -2
- mlrun/frameworks/onnx/dataset.py +3 -3
- mlrun/frameworks/onnx/mlrun_interface.py +2 -2
- mlrun/frameworks/onnx/model_handler.py +7 -5
- mlrun/frameworks/onnx/model_server.py +8 -6
- mlrun/frameworks/parallel_coordinates.py +11 -11
- mlrun/frameworks/pytorch/__init__.py +22 -23
- mlrun/frameworks/pytorch/callbacks/__init__.py +1 -2
- mlrun/frameworks/pytorch/callbacks/callback.py +2 -1
- mlrun/frameworks/pytorch/callbacks/logging_callback.py +15 -8
- mlrun/frameworks/pytorch/callbacks/mlrun_logging_callback.py +19 -12
- mlrun/frameworks/pytorch/callbacks/tensorboard_logging_callback.py +22 -15
- mlrun/frameworks/pytorch/callbacks_handler.py +36 -30
- mlrun/frameworks/pytorch/mlrun_interface.py +17 -17
- mlrun/frameworks/pytorch/model_handler.py +21 -17
- mlrun/frameworks/pytorch/model_server.py +13 -9
- mlrun/frameworks/sklearn/__init__.py +19 -18
- mlrun/frameworks/sklearn/estimator.py +2 -2
- mlrun/frameworks/sklearn/metric.py +3 -3
- mlrun/frameworks/sklearn/metrics_library.py +8 -6
- mlrun/frameworks/sklearn/mlrun_interface.py +3 -2
- mlrun/frameworks/sklearn/model_handler.py +4 -3
- mlrun/frameworks/tf_keras/__init__.py +11 -12
- mlrun/frameworks/tf_keras/callbacks/__init__.py +1 -2
- mlrun/frameworks/tf_keras/callbacks/logging_callback.py +17 -14
- mlrun/frameworks/tf_keras/callbacks/mlrun_logging_callback.py +15 -12
- mlrun/frameworks/tf_keras/callbacks/tensorboard_logging_callback.py +21 -18
- mlrun/frameworks/tf_keras/model_handler.py +17 -13
- mlrun/frameworks/tf_keras/model_server.py +12 -8
- mlrun/frameworks/xgboost/__init__.py +19 -18
- mlrun/frameworks/xgboost/model_handler.py +13 -9
- mlrun/launcher/base.py +3 -4
- mlrun/launcher/local.py +1 -1
- mlrun/launcher/remote.py +1 -1
- mlrun/lists.py +4 -3
- mlrun/model.py +117 -46
- mlrun/model_monitoring/__init__.py +4 -4
- mlrun/model_monitoring/api.py +61 -59
- mlrun/model_monitoring/applications/_application_steps.py +17 -17
- mlrun/model_monitoring/applications/base.py +165 -6
- mlrun/model_monitoring/applications/context.py +88 -37
- mlrun/model_monitoring/applications/evidently_base.py +1 -2
- mlrun/model_monitoring/applications/histogram_data_drift.py +43 -21
- mlrun/model_monitoring/applications/results.py +55 -3
- mlrun/model_monitoring/controller.py +207 -239
- mlrun/model_monitoring/db/__init__.py +0 -2
- mlrun/model_monitoring/db/_schedules.py +156 -0
- mlrun/model_monitoring/db/_stats.py +189 -0
- mlrun/model_monitoring/db/tsdb/base.py +78 -25
- mlrun/model_monitoring/db/tsdb/tdengine/schemas.py +90 -16
- mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py +33 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +279 -59
- mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py +1 -0
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +78 -17
- mlrun/model_monitoring/helpers.py +152 -49
- mlrun/model_monitoring/stream_processing.py +99 -283
- mlrun/model_monitoring/tracking_policy.py +10 -3
- mlrun/model_monitoring/writer.py +48 -36
- mlrun/package/__init__.py +3 -6
- mlrun/package/context_handler.py +1 -1
- mlrun/package/packager.py +12 -9
- mlrun/package/packagers/__init__.py +0 -2
- mlrun/package/packagers/default_packager.py +14 -11
- mlrun/package/packagers/numpy_packagers.py +16 -7
- mlrun/package/packagers/pandas_packagers.py +18 -18
- mlrun/package/packagers/python_standard_library_packagers.py +25 -11
- mlrun/package/packagers_manager.py +31 -14
- mlrun/package/utils/__init__.py +0 -3
- mlrun/package/utils/_pickler.py +6 -6
- mlrun/platforms/__init__.py +47 -16
- mlrun/platforms/iguazio.py +4 -1
- mlrun/projects/operations.py +27 -27
- mlrun/projects/pipelines.py +75 -38
- mlrun/projects/project.py +865 -206
- mlrun/run.py +53 -10
- mlrun/runtimes/__init__.py +1 -3
- mlrun/runtimes/base.py +15 -11
- mlrun/runtimes/daskjob.py +9 -9
- mlrun/runtimes/generators.py +2 -1
- mlrun/runtimes/kubejob.py +4 -5
- mlrun/runtimes/mounts.py +572 -0
- mlrun/runtimes/mpijob/__init__.py +0 -2
- mlrun/runtimes/mpijob/abstract.py +7 -6
- mlrun/runtimes/nuclio/api_gateway.py +7 -7
- mlrun/runtimes/nuclio/application/application.py +11 -11
- mlrun/runtimes/nuclio/function.py +19 -17
- mlrun/runtimes/nuclio/serving.py +18 -11
- mlrun/runtimes/pod.py +154 -45
- mlrun/runtimes/remotesparkjob.py +3 -2
- mlrun/runtimes/sparkjob/__init__.py +0 -2
- mlrun/runtimes/sparkjob/spark3job.py +21 -11
- mlrun/runtimes/utils.py +6 -5
- mlrun/serving/merger.py +6 -4
- mlrun/serving/remote.py +18 -17
- mlrun/serving/routers.py +185 -172
- mlrun/serving/server.py +7 -1
- mlrun/serving/states.py +97 -78
- mlrun/serving/utils.py +13 -2
- mlrun/serving/v1_serving.py +3 -2
- mlrun/serving/v2_serving.py +74 -65
- mlrun/track/__init__.py +1 -1
- mlrun/track/tracker.py +2 -2
- mlrun/track/trackers/mlflow_tracker.py +6 -5
- mlrun/utils/async_http.py +1 -1
- mlrun/utils/clones.py +1 -1
- mlrun/utils/helpers.py +66 -18
- mlrun/utils/logger.py +106 -4
- mlrun/utils/notifications/notification/__init__.py +22 -19
- mlrun/utils/notifications/notification/base.py +33 -14
- mlrun/utils/notifications/notification/console.py +6 -6
- mlrun/utils/notifications/notification/git.py +11 -11
- mlrun/utils/notifications/notification/ipython.py +10 -9
- mlrun/utils/notifications/notification/mail.py +176 -0
- mlrun/utils/notifications/notification/slack.py +6 -6
- mlrun/utils/notifications/notification/webhook.py +6 -6
- mlrun/utils/notifications/notification_pusher.py +86 -44
- mlrun/utils/regex.py +3 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/METADATA +191 -186
- mlrun-1.8.0rc8.dist-info/RECORD +347 -0
- {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/WHEEL +1 -1
- mlrun/model_monitoring/db/stores/__init__.py +0 -136
- mlrun/model_monitoring/db/stores/base/store.py +0 -213
- mlrun/model_monitoring/db/stores/sqldb/__init__.py +0 -13
- mlrun/model_monitoring/db/stores/sqldb/models/__init__.py +0 -71
- mlrun/model_monitoring/db/stores/sqldb/models/base.py +0 -190
- mlrun/model_monitoring/db/stores/sqldb/models/mysql.py +0 -103
- mlrun/model_monitoring/db/stores/sqldb/models/sqlite.py +0 -40
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +0 -659
- mlrun/model_monitoring/db/stores/v3io_kv/__init__.py +0 -13
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +0 -726
- mlrun/model_monitoring/model_endpoint.py +0 -118
- mlrun-1.7.1rc4.dist-info/RECORD +0 -351
- {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/LICENSE +0 -0
- {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.1rc4.dist-info → mlrun-1.8.0rc8.dist-info}/top_level.txt +0 -0
mlrun/common/schemas/model_monitoring/model_endpoints.py
CHANGED
@@ -11,27 +11,22 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
-
-import enum
+import abc
 import json
 from datetime import datetime
 from typing import Any, NamedTuple, Optional, TypeVar

-from pydantic import BaseModel,
+from pydantic.v1 import BaseModel, Field, constr

 # TODO: remove the unused import below after `mlrun.datastore` and `mlrun.utils` usage is removed.
 # At the moment `make lint` fails if this is removed.
-import
-
-from ..object import ObjectKind, ObjectSpec, ObjectStatus
+from ..object import ObjectKind, ObjectMetadata, ObjectSpec, ObjectStatus
+from . import ModelEndpointSchema
 from .constants import (
     FQN_REGEX,
     MODEL_ENDPOINT_ID_PATTERN,
     PROJECT_PATTERN,
     EndpointType,
-    EventFieldType,
-    EventKeyMetrics,
-    EventLiveStats,
     ModelEndpointMonitoringMetricType,
     ModelMonitoringMode,
     ResultKindApp,
@@ -47,77 +42,6 @@ class ModelMonitoringStoreKinds:
     EVENTS = "events"


-class ModelEndpointMetadata(BaseModel):
-    project: constr(regex=PROJECT_PATTERN)
-    uid: constr(regex=MODEL_ENDPOINT_ID_PATTERN)
-    labels: Optional[dict] = {}
-
-    class Config:
-        extra = Extra.allow
-
-    @classmethod
-    def from_flat_dict(cls, endpoint_dict: dict, json_parse_values: list = None):
-        """Create a `ModelEndpointMetadata` object from an endpoint dictionary
-
-        :param endpoint_dict: Model endpoint dictionary.
-        :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
-            dictionary using json.loads().
-        """
-        if json_parse_values is None:
-            json_parse_values = [EventFieldType.LABELS]
-
-        return _mapping_attributes(
-            model_class=cls,
-            flattened_dictionary=endpoint_dict,
-            json_parse_values=json_parse_values,
-        )
-
-
-class ModelEndpointSpec(ObjectSpec):
-    function_uri: Optional[str] = ""  # <project_name>/<function_name>:<tag>
-    model: Optional[str] = ""  # <model_name>:<version>
-    model_class: Optional[str] = ""
-    model_uri: Optional[str] = ""
-    feature_names: Optional[list[str]] = []
-    label_names: Optional[list[str]] = []
-    stream_path: Optional[str] = ""
-    algorithm: Optional[str] = ""
-    monitor_configuration: Optional[dict] = {}
-    active: Optional[bool] = True
-    monitoring_mode: Optional[ModelMonitoringMode] = ModelMonitoringMode.disabled.value
-
-    @classmethod
-    def from_flat_dict(cls, endpoint_dict: dict, json_parse_values: list = None):
-        """Create a `ModelEndpointSpec` object from an endpoint dictionary
-
-        :param endpoint_dict: Model endpoint dictionary.
-        :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
-            dictionary using json.loads().
-        """
-        if json_parse_values is None:
-            json_parse_values = [
-                EventFieldType.FEATURE_NAMES,
-                EventFieldType.LABEL_NAMES,
-                EventFieldType.MONITOR_CONFIGURATION,
-            ]
-        return _mapping_attributes(
-            model_class=cls,
-            flattened_dictionary=endpoint_dict,
-            json_parse_values=json_parse_values,
-        )
-
-    @validator("model_uri")
-    @classmethod
-    def validate_model_uri(cls, model_uri):
-        """Validate that the model uri includes the required prefix"""
-        prefix, uri = mlrun.datastore.parse_store_uri(model_uri)
-        if prefix and prefix != mlrun.utils.helpers.StorePrefix.Model:
-            return mlrun.datastore.get_store_uri(
-                mlrun.utils.helpers.StorePrefix.Model, uri
-            )
-        return model_uri
-
-
 class Histogram(BaseModel):
     buckets: list[float]
     counts: list[int]
@@ -163,48 +87,24 @@ class Features(BaseModel):
         )


-class
-
-
-
-    last_request: Optional[str] = ""
-    error_count: Optional[int] = 0
-    drift_status: Optional[str] = ""
-    drift_measures: Optional[dict] = {}
-    metrics: Optional[dict[str, dict[str, Any]]] = {
-        EventKeyMetrics.GENERIC: {
-            EventLiveStats.LATENCY_AVG_1H: 0,
-            EventLiveStats.PREDICTIONS_PER_SECOND: 0,
-        }
-    }
-    features: Optional[list[Features]] = []
-    children: Optional[list[str]] = []
-    children_uids: Optional[list[str]] = []
-    endpoint_type: Optional[EndpointType] = EndpointType.NODE_EP
-    monitoring_feature_set_uri: Optional[str] = ""
-    state: Optional[str] = ""
-
-    class Config:
-        extra = Extra.allow
+class ModelEndpointParser(abc.ABC, BaseModel):
+    @classmethod
+    def json_parse_values(cls) -> list[str]:
+        return []

     @classmethod
-    def from_flat_dict(
-
+    def from_flat_dict(
+        cls, endpoint_dict: dict, json_parse_values: Optional[list] = None
+    ) -> "ModelEndpointParser":
+        """Create a `ModelEndpointParser` object from an endpoint dictionary

         :param endpoint_dict: Model endpoint dictionary.
         :param json_parse_values: List of dictionary keys with a JSON string value that will be parsed into a
             dictionary using json.loads().
         """
         if json_parse_values is None:
-            json_parse_values =
-
-                EventFieldType.CURRENT_STATS,
-                EventFieldType.DRIFT_MEASURES,
-                EventFieldType.METRICS,
-                EventFieldType.CHILDREN,
-                EventFieldType.CHILDREN_UIDS,
-                EventFieldType.ENDPOINT_TYPE,
-            ]
+            json_parse_values = cls.json_parse_values()
+
         return _mapping_attributes(
             model_class=cls,
             flattened_dictionary=endpoint_dict,
@@ -212,16 +112,53 @@ class ModelEndpointStatus(ObjectStatus):
         )


+class ModelEndpointMetadata(ObjectMetadata, ModelEndpointParser):
+    project: constr(regex=PROJECT_PATTERN)
+    endpoint_type: EndpointType = EndpointType.NODE_EP
+    uid: Optional[constr(regex=MODEL_ENDPOINT_ID_PATTERN)]
+
+
+class ModelEndpointSpec(ObjectSpec, ModelEndpointParser):
+    model_uid: Optional[str] = ""
+    model_name: Optional[str] = ""
+    model_tag: Optional[str] = ""
+    model_class: Optional[str] = ""
+    function_name: Optional[str] = ""
+    function_tag: Optional[str] = ""
+    function_uid: Optional[str] = ""
+    feature_names: Optional[list[str]] = []
+    label_names: Optional[list[str]] = []
+    feature_stats: Optional[dict] = {}
+    function_uri: Optional[str] = ""  # <project_name>/<function_hash>
+    model_uri: Optional[str] = ""
+    children: Optional[list[str]] = []
+    children_uids: Optional[list[str]] = []
+    monitoring_feature_set_uri: Optional[str] = ""
+
+
+class ModelEndpointStatus(ObjectStatus, ModelEndpointParser):
+    state: Optional[str] = "unknown"  # will be updated according to the function state
+    first_request: Optional[datetime] = None
+    monitoring_mode: Optional[ModelMonitoringMode] = ModelMonitoringMode.disabled
+
+    # operative
+    last_request: Optional[datetime] = None
+    result_status: Optional[int] = -1
+    avg_latency: Optional[float] = None
+    error_count: Optional[int] = 0
+    current_stats: Optional[dict] = {}
+    current_stats_timestamp: Optional[datetime] = None
+    drift_measures: Optional[dict] = {}
+    drift_measures_timestamp: Optional[datetime] = None
+
+
 class ModelEndpoint(BaseModel):
     kind: ObjectKind = Field(ObjectKind.model_endpoint, const=True)
     metadata: ModelEndpointMetadata
-    spec: ModelEndpointSpec
-    status: ModelEndpointStatus
+    spec: ModelEndpointSpec
+    status: ModelEndpointStatus

-
-        extra = Extra.allow
-
-    def flat_dict(self):
+    def flat_dict(self) -> dict[str, Any]:
         """Generate a flattened `ModelEndpoint` dictionary. The flattened dictionary result is important for storing
         the model endpoint object in the database.

@@ -229,35 +166,24 @@ class ModelEndpoint(BaseModel):
         """
         # Convert the ModelEndpoint object into a dictionary using BaseModel dict() function
         # In addition, remove the BaseModel kind as it is not required by the DB schema
-        model_endpoint_dictionary = self.dict(exclude={"kind"})

+        model_endpoint_dictionary = self.dict(exclude={"kind"})
+        exclude = {
+            "tag",
+            ModelEndpointSchema.FEATURE_STATS,
+            ModelEndpointSchema.CURRENT_STATS,
+            ModelEndpointSchema.DRIFT_MEASURES,
+            ModelEndpointSchema.FUNCTION_URI,
+            ModelEndpointSchema.MODEL_URI,
+        }
         # Initialize a flattened dictionary that will be filled with the model endpoint dictionary attributes
         flatten_dict = {}
         for k_object in model_endpoint_dictionary:
             for key in model_endpoint_dictionary[k_object]:
-
-
-
-
-                # for matching the database required format
-                if not isinstance(current_value, (str, bool, int)) or isinstance(
-                    current_value, enum.IntEnum
-                ):
-                    flatten_dict[key] = json.dumps(current_value)
-                else:
-                    flatten_dict[key] = current_value
-
-        if EventFieldType.METRICS not in flatten_dict:
-            # Initialize metrics dictionary
-            flatten_dict[EventFieldType.METRICS] = {
-                EventKeyMetrics.GENERIC: {
-                    EventLiveStats.LATENCY_AVG_1H: 0,
-                    EventLiveStats.PREDICTIONS_PER_SECOND: 0,
-                }
-            }
-
-        # Remove the features from the dictionary as this field will be filled only within the feature analysis process
-        flatten_dict.pop(EventFieldType.FEATURES, None)
+                if key not in exclude:
+                    # Extract the value of the current field
+                    flatten_dict[key] = model_endpoint_dictionary[k_object][key]
+
         return flatten_dict

     @classmethod
@@ -274,9 +200,17 @@ class ModelEndpoint(BaseModel):
             status=ModelEndpointStatus.from_flat_dict(endpoint_dict=endpoint_dict),
         )

+    def get(self, field, default=None):
+        return (
+            getattr(self.metadata, field, None)
+            or getattr(self.spec, field, None)
+            or getattr(self.status, field, None)
+            or default
+        )
+

 class ModelEndpointList(BaseModel):
-    endpoints: list[ModelEndpoint]
+    endpoints: list[ModelEndpoint]


 class ModelEndpointMonitoringMetric(BaseModel):
@@ -284,7 +218,14 @@ class ModelEndpointMonitoringMetric(BaseModel):
     app: str
     type: ModelEndpointMonitoringMetricType
     name: str
-    full_name: str
+    full_name: Optional[str] = None
+    kind: Optional[ResultKindApp] = None
+
+    def __init__(self, **kwargs):
+        super().__init__(**kwargs)
+        self.full_name = _compose_full_name(
+            project=self.project, app=self.app, name=self.name, type=self.type
+        )


 def _compose_full_name(
@@ -315,6 +256,7 @@ class _ResultPoint(NamedTuple):
     timestamp: datetime
     value: float
     status: ResultStatusApp
+    extra_data: Optional[str] = ""


 class _ModelEndpointMonitoringMetricValuesBase(BaseModel):
@@ -365,8 +307,10 @@ def _mapping_attributes(
                 dict_to_parse[field_key] = _json_loads_if_not_none(
                     flattened_dictionary[field_key]
                 )
-
+            elif flattened_dictionary[field_key] != "null":
                 dict_to_parse[field_key] = flattened_dictionary[field_key]
+            else:
+                dict_to_parse[field_key] = None

     return model_class.parse_obj(dict_to_parse)

mlrun/common/schemas/notification.py
CHANGED
@@ -15,8 +15,9 @@
 import datetime
 import enum
 import typing
+from typing import Optional

-import pydantic
+import pydantic.v1

 import mlrun.common.types

@@ -45,6 +46,13 @@ class NotificationKind(mlrun.common.types.StrEnum):
     slack: str = "slack"
     """**webhook** - The slack webhook to which to send the notification."""

+    mail: str = "mail"
+    """
+    **email_addresses** - The target mails\n
+    **subject** - The subject of the mail\n
+    **body** - The body of the mail\n
+    """
+
     webhook: str = "webhook"
     """
     **url** - The webhook url to which to send the notification.\n
@@ -86,7 +94,7 @@ class NotificationLimits(enum.Enum):
     )  # 900KB (k8s secret size limit is 1MB minus buffer for metadata)


-class Notification(pydantic.BaseModel):
+class Notification(pydantic.v1.BaseModel):
     """
     Notification object schema

@@ -120,5 +128,18 @@ class Notification(pydantic.BaseModel):
     reason: typing.Optional[str] = None


-class SetNotificationRequest(pydantic.BaseModel):
+class SetNotificationRequest(pydantic.v1.BaseModel):
     notifications: list[Notification] = None
+
+
+class NotificationSummary(pydantic.v1.BaseModel):
+    failed: int = 0
+    succeeded: int = 0
+
+
+class NotificationState(pydantic.v1.BaseModel):
+    kind: str
+    err: Optional[
+        str
+    ]  # empty error means that the notifications were sent successfully
+    summary: NotificationSummary
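The new `mail` notification kind and the `NotificationState`/`NotificationSummary` models above can be exercised as in the following minimal sketch. It is not mlrun code: it assumes only the module path from the file list above and the fields exactly as shown in the hunk.

from mlrun.common.schemas.notification import (  # module path taken from the file list above
    NotificationKind,
    NotificationState,
    NotificationSummary,
)

# The new "mail" kind sits next to slack/webhook/... and is driven by the
# email_addresses/subject/body params documented in its docstring.
assert NotificationKind.mail == "mail"

# Aggregated delivery state for one notification kind; an empty error means
# the notifications were sent successfully.
state = NotificationState(
    kind=NotificationKind.mail,
    err=None,
    summary=NotificationSummary(succeeded=2, failed=0),
)
print(state.dict())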
mlrun/common/schemas/object.py
CHANGED
@@ -14,13 +14,13 @@

 import typing

-import pydantic
+import pydantic.v1


-class PaginationInfo(pydantic.BaseModel):
+class PaginationInfo(pydantic.v1.BaseModel):
     class Config:
         allow_population_by_field_name = True

     page: typing.Optional[int]
-    page_size: typing.Optional[int] = pydantic.Field(alias="page-size")
-    page_token: typing.Optional[str] = pydantic.Field(alias="page-token")
+    page_size: typing.Optional[int] = pydantic.v1.Field(alias="page-size")
+    page_token: typing.Optional[str] = pydantic.v1.Field(alias="page-token")
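The recurring `import pydantic` → `import pydantic.v1` change in this and the surrounding schema files moves the models onto pydantic's v1 compatibility namespace, so the existing v1-style API (Config, Field aliases, .dict(), parse_obj) keeps working when pydantic 2 is installed. A minimal, self-contained sketch of the pattern (not mlrun code):

# Requires pydantic>=2, which ships the old API under the pydantic.v1 namespace.
import typing

import pydantic.v1


class PaginationInfoSketch(pydantic.v1.BaseModel):
    # Same pattern as the hunk above: v1-style Config plus Field aliases.
    class Config:
        allow_population_by_field_name = True

    page: typing.Optional[int]
    page_size: typing.Optional[int] = pydantic.v1.Field(alias="page-size")
    page_token: typing.Optional[str] = pydantic.v1.Field(alias="page-token")


info = PaginationInfoSketch.parse_obj({"page": 1, "page-size": 50})
print(info.dict(by_alias=True))  # {'page': 1, 'page-size': 50, 'page-token': None}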
mlrun/common/schemas/partition.py
ADDED
@@ -0,0 +1,137 @@
+# Copyright 2024 Iguazio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+from datetime import datetime, timedelta
+
+from mlrun.common.types import StrEnum
+
+
+class PartitionInterval(StrEnum):
+    DAY = "DAY"
+    MONTH = "MONTH"
+    YEARWEEK = "YEARWEEK"
+
+    @classmethod
+    def is_valid(cls, value: str) -> bool:
+        return value in cls._value2member_map_
+
+    @classmethod
+    def valid_intervals(cls) -> list:
+        return list(cls._value2member_map_.keys())
+
+    def as_duration(self) -> timedelta:
+        """
+        Convert the partition interval to a duration-like timedelta.
+
+        Returns:
+            timedelta: A duration representing the partition interval.
+        """
+        if self == PartitionInterval.DAY:
+            return timedelta(days=1)
+        elif self == PartitionInterval.MONTH:
+            # Approximate a month as 30 days
+            return timedelta(days=30)
+        elif self == PartitionInterval.YEARWEEK:
+            return timedelta(weeks=1)
+
+    @classmethod
+    def from_function(cls, partition_function: str):
+        """
+        Returns the corresponding PartitionInterval for a given partition function,
+        or None if the function is not mapped.
+
+        :param partition_function: The partition function to map to an interval.
+        :return: PartitionInterval corresponding to the function, or None if no match is found.
+        """
+        partition_function_to_partitions_interval = {
+            "DAY": "DAY",
+            "DAYOFMONTH": "DAY",
+            "MONTH": "MONTH",
+            "YEARWEEK": "YEARWEEK",
+        }
+        interval = partition_function_to_partitions_interval.get(partition_function)
+        if interval and cls.is_valid(interval):
+            return cls[interval]
+        raise KeyError(f"Partition function: {partition_function} isn't supported")
+
+    def get_partition_info(
+        self,
+        start_datetime: datetime,
+        partition_number: int = 1,
+    ) -> list[tuple[str, str]]:
+        """
+        Generates partition details for a specified number of partitions starting from a given datetime.
+
+        :param start_datetime: The starting datetime used for generating partition details.
+        :param partition_number: The number of partitions to generate details for.
+
+        :return: A list of tuples:
+                 - partition_name: The name for the partition.
+                 - partition_value: The "LESS THAN" value for the next partition boundary.
+        """
+        partitioning_information_list = []
+        current_datetime = start_datetime
+
+        for _ in range(partition_number):
+            partition_name = self.get_partition_name(current_datetime)
+            partition_boundary_date = self.get_next_partition_time(current_datetime)
+            partition_value = self.get_partition_name(partition_boundary_date)
+            partitioning_information_list.append((partition_name, partition_value))
+
+            # Move to the next interval
+            current_datetime = partition_boundary_date
+
+        return partitioning_information_list
+
+    def get_next_partition_time(self, current_datetime: datetime) -> datetime:
+        """
+        Calculates the next partition boundary time based on the specified partition interval.
+        :param current_datetime: The current datetime from which the next interval is calculated.
+
+        :return: A datetime object representing the start of the next partition interval.
+                 - If the interval is DAY, it advances by one day.
+                 - If the interval is MONTH, it advances to the first day of the next month.
+                 - If the interval is YEARWEEK, it advances by one week.
+        """
+        if self == PartitionInterval.DAY:
+            return current_datetime + timedelta(days=1)
+        elif self == PartitionInterval.MONTH:
+            return (current_datetime.replace(day=1) + timedelta(days=32)).replace(day=1)
+        elif self == PartitionInterval.YEARWEEK:
+            return current_datetime + timedelta(weeks=1)
+
+    def get_partition_name(self, current_datetime: datetime) -> str:
+        if self == PartitionInterval.DAY:
+            return current_datetime.strftime("%Y%m%d")
+        elif self == PartitionInterval.MONTH:
+            return current_datetime.strftime("%Y%m")
+        elif self == PartitionInterval.YEARWEEK:
+            year, week, _ = current_datetime.isocalendar()
+            return f"{year}{week:02d}"
+
+    def get_partition_expression(self):
+        if self == PartitionInterval.YEARWEEK:
+            return "YEARWEEK(activation_time, 1)"
+        else:
+            return f"{self}(activation_time)"
+
+    def get_number_of_partitions(self, days: int) -> int:
+        # Calculate the number partitions based on given number of days
+        if self == PartitionInterval.DAY:
+            return days
+        elif self == PartitionInterval.MONTH:
+            # Average number days in a month is 30.44
+            return int(days / 30.44)
+        elif self == PartitionInterval.YEARWEEK:
+            return int(days / 7)
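Since partition.py is entirely new, a short usage sketch may help; it assumes only the class exactly as added above and the module path from the file list.

from datetime import datetime

from mlrun.common.schemas.partition import PartitionInterval  # path per the new file above

# Map a SQL partition function name to an interval; DAYOFMONTH folds into DAY.
interval = PartitionInterval.from_function("DAYOFMONTH")
assert interval == PartitionInterval.DAY

# Three daily partitions starting 2024-01-30; each tuple is
# (partition_name, "LESS THAN" boundary value).
print(interval.get_partition_info(datetime(2024, 1, 30), partition_number=3))
# [('20240130', '20240131'), ('20240131', '20240201'), ('20240201', '20240202')]

# Roughly how many monthly partitions cover 365 days of retention.
print(PartitionInterval.MONTH.get_number_of_partitions(days=365))  # 11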
mlrun/common/schemas/pipeline.py
CHANGED
@@ -14,7 +14,7 @@
 #
 import typing

-import pydantic
+import pydantic.v1
 from deprecated import deprecated

 import mlrun.common.types
@@ -39,7 +39,7 @@ class PipelinesPagination(str):
     max_page_size = 200


-class PipelinesOutput(pydantic.BaseModel):
+class PipelinesOutput(pydantic.v1.BaseModel):
     # use the format query param to control what is returned
     runs: list[typing.Union[dict, str]]
     total_size: int
mlrun/common/schemas/project.py
CHANGED
@@ -15,7 +15,7 @@
 import datetime
 import typing

-import pydantic
+import pydantic.v1
 from deprecated import deprecated

 import mlrun.common.types
@@ -40,14 +40,14 @@ class ProjectsFormat(mlrun.common.types.StrEnum):
     leader = "leader"


-class ProjectMetadata(pydantic.BaseModel):
+class ProjectMetadata(pydantic.v1.BaseModel):
     name: str
     created: typing.Optional[datetime.datetime] = None
     labels: typing.Optional[dict] = {}
     annotations: typing.Optional[dict] = {}

     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow


 class ProjectDesiredState(mlrun.common.types.StrEnum):
@@ -77,7 +77,7 @@ class ProjectStatus(ObjectStatus):
     state: typing.Optional[ProjectState]


-class ProjectSpec(pydantic.BaseModel):
+class ProjectSpec(pydantic.v1.BaseModel):
     description: typing.Optional[str] = None
     owner: typing.Optional[str] = None
     goals: typing.Optional[str] = None
@@ -97,10 +97,10 @@ class ProjectSpec(pydantic.BaseModel):
     default_function_node_selector: typing.Optional[dict] = {}

     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow


-class ProjectSpecOut(pydantic.BaseModel):
+class ProjectSpecOut(pydantic.v1.BaseModel):
     description: typing.Optional[str] = None
     owner: typing.Optional[str] = None
     goals: typing.Optional[str] = None
@@ -120,11 +120,11 @@ class ProjectSpecOut(pydantic.BaseModel):
     default_function_node_selector: typing.Optional[dict] = {}

     class Config:
-        extra = pydantic.Extra.allow
+        extra = pydantic.v1.Extra.allow


-class Project(pydantic.BaseModel):
-    kind: ObjectKind = pydantic.Field(ObjectKind.project, const=True)
+class Project(pydantic.v1.BaseModel):
+    kind: ObjectKind = pydantic.v1.Field(ObjectKind.project, const=True)
     metadata: ProjectMetadata
     spec: ProjectSpec = ProjectSpec()
     status: ObjectStatus = ObjectStatus()
@@ -132,19 +132,19 @@ class Project(pydantic.BaseModel):

 # The reason we have a different schema for the response model is that we don't want to validate project.spec.build in
 # the response as the validation was added late and there may be corrupted values in the DB.
-class ProjectOut(pydantic.BaseModel):
-    kind: ObjectKind = pydantic.Field(ObjectKind.project, const=True)
+class ProjectOut(pydantic.v1.BaseModel):
+    kind: ObjectKind = pydantic.v1.Field(ObjectKind.project, const=True)
     metadata: ProjectMetadata
     spec: ProjectSpecOut = ProjectSpecOut()
     status: ObjectStatus = ObjectStatus()


-class ProjectOwner(pydantic.BaseModel):
+class ProjectOwner(pydantic.v1.BaseModel):
     username: str
     access_key: str


-class ProjectSummary(pydantic.BaseModel):
+class ProjectSummary(pydantic.v1.BaseModel):
     name: str
     files_count: int = 0
     feature_sets_count: int = 0
@@ -159,9 +159,12 @@ class ProjectSummary(pydantic.BaseModel):
     pipelines_failed_recent_count: typing.Optional[int] = None
     pipelines_running_count: typing.Optional[int] = None
     updated: typing.Optional[datetime.datetime] = None
+    endpoint_alerts_count: int = 0
+    job_alerts_count: int = 0
+    other_alerts_count: int = 0


-class IguazioProject(pydantic.BaseModel):
+class IguazioProject(pydantic.v1.BaseModel):
     data: dict


@@ -175,13 +178,18 @@ class IguazioProject(pydantic.BaseModel):
 # to add a specific classes for them. it's frustrating but couldn't find other workaround, see:
 # https://github.com/samuelcolvin/pydantic/issues/1423, https://github.com/samuelcolvin/pydantic/issues/619
 ProjectOutput = typing.TypeVar(
-    "ProjectOutput",
+    "ProjectOutput",
+    ProjectOut,
+    str,
+    ProjectSummary,
+    IguazioProject,
+    tuple[str, datetime.datetime],
 )


-class ProjectsOutput(pydantic.BaseModel):
+class ProjectsOutput(pydantic.v1.BaseModel):
     projects: list[ProjectOutput]


-class ProjectSummariesOutput(pydantic.BaseModel):
+class ProjectSummariesOutput(pydantic.v1.BaseModel):
     project_summaries: list[ProjectSummary]