mlrun 1.7.0rc21__py3-none-any.whl → 1.7.0rc23__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of mlrun might be problematic.
- mlrun/alerts/alert.py +42 -17
- mlrun/common/schemas/__init__.py +2 -0
- mlrun/common/schemas/feature_store.py +78 -28
- mlrun/config.py +3 -0
- mlrun/db/base.py +1 -0
- mlrun/db/httpdb.py +9 -6
- mlrun/db/nopdb.py +1 -0
- mlrun/errors.py +1 -3
- mlrun/execution.py +2 -0
- mlrun/launcher/local.py +4 -0
- mlrun/launcher/remote.py +1 -0
- mlrun/model.py +2 -0
- mlrun/model_monitoring/api.py +1 -0
- mlrun/model_monitoring/applications/base.py +3 -3
- mlrun/model_monitoring/db/stores/__init__.py +27 -21
- mlrun/model_monitoring/db/stores/base/store.py +1 -0
- mlrun/model_monitoring/db/stores/sqldb/sql_store.py +8 -8
- mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +8 -8
- mlrun/model_monitoring/db/tsdb/__init__.py +1 -1
- mlrun/model_monitoring/db/tsdb/base.py +1 -14
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +22 -18
- mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +67 -46
- mlrun/model_monitoring/helpers.py +25 -4
- mlrun/model_monitoring/stream_processing.py +9 -11
- mlrun/model_monitoring/writer.py +10 -6
- mlrun/projects/operations.py +5 -0
- mlrun/projects/project.py +11 -1
- mlrun/runtimes/base.py +6 -0
- mlrun/runtimes/daskjob.py +1 -0
- mlrun/runtimes/databricks_job/databricks_runtime.py +1 -0
- mlrun/runtimes/local.py +7 -1
- mlrun/runtimes/nuclio/application/application.py +0 -2
- mlrun/runtimes/nuclio/serving.py +9 -6
- mlrun/serving/__init__.py +8 -1
- mlrun/serving/states.py +51 -8
- mlrun/serving/utils.py +19 -11
- mlrun/serving/v2_serving.py +54 -38
- mlrun/utils/helpers.py +51 -9
- mlrun/utils/notifications/notification/base.py +39 -7
- mlrun/utils/notifications/notification/slack.py +1 -14
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc23.dist-info}/METADATA +1 -1
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc23.dist-info}/RECORD +47 -47
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc23.dist-info}/LICENSE +0 -0
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc23.dist-info}/WHEEL +0 -0
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc23.dist-info}/entry_points.txt +0 -0
- {mlrun-1.7.0rc21.dist-info → mlrun-1.7.0rc23.dist-info}/top_level.txt +0 -0
mlrun/alerts/alert.py
CHANGED
@@ -30,6 +30,11 @@ class AlertConfig(ModelObj):
         "reset_policy",
         "state",
     ]
+    _fields_to_serialize = ModelObj._fields_to_serialize + [
+        "entities",
+        "notifications",
+        "trigger",
+    ]

     def __init__(
         self,
@@ -71,24 +76,44 @@ class AlertConfig(ModelObj):
         if not self.project or not self.name:
             raise mlrun.errors.MLRunBadRequestError("Project and name must be provided")

+    def _serialize_field(
+        self, struct: dict, field_name: str = None, strip: bool = False
+    ):
+        if field_name == "entities":
+            if self.entities:
+                return (
+                    self.entities.dict()
+                    if not isinstance(self.entities, dict)
+                    else self.entities
+                )
+            return None
+        if field_name == "notifications":
+            if self.notifications:
+                return [
+                    notification_data.dict()
+                    if not isinstance(notification_data, dict)
+                    else notification_data
+                    for notification_data in self.notifications
+                ]
+            return None
+        if field_name == "trigger":
+            if self.trigger:
+                return (
+                    self.trigger.dict()
+                    if not isinstance(self.trigger, dict)
+                    else self.trigger
+                )
+            return None
+        return super()._serialize_field(struct, field_name, strip)
+
     def to_dict(self, fields: list = None, exclude: list = None, strip: bool = False):
-
-
-
-
-
-
-        )
-        data["notifications"] = [
-            notification_data.dict()
-            if not isinstance(notification_data, dict)
-            else notification_data
-            for notification_data in self.notifications
-        ]
-        data["trigger"] = (
-            self.trigger.dict() if not isinstance(self.trigger, dict) else self.trigger
-        )
-        return data
+        if self.entities is None:
+            raise mlrun.errors.MLRunBadRequestError("Alert entity field is missing")
+        if not self.notifications:
+            raise mlrun.errors.MLRunBadRequestError(
+                "Alert must have at least one notification"
+            )
+        return super().to_dict(self._dict_fields)

     @classmethod
     def from_dict(cls, struct=None, fields=None, deprecated_fields: dict = None):
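The rewrite above moves AlertConfig's custom dict conversion out of to_dict and into per-field _serialize_field hooks registered via _fields_to_serialize, leaving to_dict to validate the alert and delegate to the base class. Below is a minimal, self-contained sketch of that hook pattern; ModelObjLike is an illustrative stand-in for mlrun's ModelObj base class, not its actual implementation.

# Illustrative sketch of the per-field serialization hook pattern used above.
# "ModelObjLike" only approximates mlrun's ModelObj for this example.
class ModelObjLike:
    _dict_fields = []
    _fields_to_serialize = []

    def _serialize_field(self, struct: dict, field_name: str = None, strip: bool = False):
        # Default behaviour: return the attribute unchanged
        return getattr(self, field_name, None)

    def to_dict(self, fields: list = None, exclude: list = None, strip: bool = False):
        struct = {}
        for field in fields or self._dict_fields:
            if field in self._fields_to_serialize:
                struct[field] = self._serialize_field(struct, field, strip)
            else:
                struct[field] = getattr(self, field, None)
        return struct


class Trigger:
    def dict(self):
        return {"events": ["data_drift_detected"]}


class AlertConfigLike(ModelObjLike):
    _dict_fields = ["name", "trigger"]
    _fields_to_serialize = ["trigger"]

    def __init__(self, name: str, trigger: Trigger):
        self.name = name
        self.trigger = trigger

    def _serialize_field(self, struct: dict, field_name: str = None, strip: bool = False):
        # Mirror the new AlertConfig hook: convert the object to a plain dict on demand
        if field_name == "trigger" and self.trigger is not None:
            return self.trigger if isinstance(self.trigger, dict) else self.trigger.dict()
        return super()._serialize_field(struct, field_name, strip)


print(AlertConfigLike("drift-alert", Trigger()).to_dict())
# {'name': 'drift-alert', 'trigger': {'events': ['data_drift_detected']}}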
mlrun/common/schemas/__init__.py
CHANGED
@@ -82,6 +82,7 @@ from .events import (
 )
 from .feature_store import (
     EntitiesOutput,
+    EntitiesOutputV2,
     Entity,
     EntityListOutput,
     EntityRecord,
@@ -98,6 +99,7 @@ from .feature_store import (
     FeatureSetSpec,
     FeatureSetsTagsOutput,
     FeaturesOutput,
+    FeaturesOutputV2,
     FeatureVector,
     FeatureVectorRecord,
     FeatureVectorsOutput,
mlrun/common/schemas/feature_store.py
CHANGED
@@ -14,7 +14,7 @@
 #
 from typing import Optional

-
+import pydantic

 from .auth import AuthorizationResourceTypes, Credentials
 from .object import (
@@ -27,32 +27,62 @@ from .object import (
 )


-class
+class FeatureStoreBaseModel(pydantic.BaseModel):
+    """
+    Intermediate base class, in order to override pydantic's configuration, as per
+    https://docs.pydantic.dev/1.10/usage/model_config/#change-behaviour-globally
+    """
+
+    class Config:
+        copy_on_model_validation = "none"
+
+
+class Feature(FeatureStoreBaseModel):
+    name: str
+    value_type: str
+    labels: Optional[dict] = {}
+
+    class Config:
+        extra = pydantic.Extra.allow
+
+
+class QualifiedFeature(FeatureStoreBaseModel):
+    name: str
+    value_type: str
+    feature_set_index: int
+    labels: Optional[dict] = {}
+
+    class Config:
+        extra = pydantic.Extra.allow
+
+
+class Entity(FeatureStoreBaseModel):
     name: str
     value_type: str
     labels: Optional[dict] = {}

     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow


-class
+class QualifiedEntity(FeatureStoreBaseModel):
     name: str
     value_type: str
+    feature_set_index: int
     labels: Optional[dict] = {}

     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow


 class FeatureSetSpec(ObjectSpec):
     entities: list[Entity] = []
     features: list[Feature] = []
-    engine: Optional[str] = Field(default="storey")
+    engine: Optional[str] = pydantic.Field(default="storey")


-class FeatureSet(BaseModel):
-    kind: ObjectKind = Field(ObjectKind.feature_set, const=True)
+class FeatureSet(FeatureStoreBaseModel):
+    kind: ObjectKind = pydantic.Field(ObjectKind.feature_set, const=True)
     metadata: ObjectMetadata
     spec: FeatureSetSpec
     status: ObjectStatus
@@ -62,7 +92,7 @@ class FeatureSet(BaseModel):
         return AuthorizationResourceTypes.feature_set


-class EntityRecord(BaseModel):
+class EntityRecord(FeatureStoreBaseModel):
     name: str
     value_type: str
     labels: list[LabelRecord]
@@ -71,7 +101,7 @@ class EntityRecord(BaseModel):
         orm_mode = True


-class FeatureRecord(
+class FeatureRecord(FeatureStoreBaseModel):
     name: str
     value_type: str
     labels: list[LabelRecord]
@@ -88,44 +118,64 @@ class FeatureSetRecord(ObjectRecord):
         orm_mode = True


-class FeatureSetsOutput(
+class FeatureSetsOutput(FeatureStoreBaseModel):
     feature_sets: list[FeatureSet]


-class FeatureSetsTagsOutput(
+class FeatureSetsTagsOutput(FeatureStoreBaseModel):
     tags: list[str] = []


-class FeatureSetDigestSpec(
+class FeatureSetDigestSpec(FeatureStoreBaseModel):
     entities: list[Entity]
     features: list[Feature]


-class FeatureSetDigestOutput(
+class FeatureSetDigestOutput(FeatureStoreBaseModel):
     metadata: ObjectMetadata
     spec: FeatureSetDigestSpec


-class
+class FeatureSetDigestSpecV2(FeatureStoreBaseModel):
+    entities: list[Entity]
+
+
+class FeatureSetDigestOutputV2(FeatureStoreBaseModel):
+    feature_set_index: int
+    metadata: ObjectMetadata
+    spec: FeatureSetDigestSpecV2
+
+
+class FeatureListOutput(FeatureStoreBaseModel):
     feature: Feature
     feature_set_digest: FeatureSetDigestOutput


-class FeaturesOutput(
+class FeaturesOutput(FeatureStoreBaseModel):
     features: list[FeatureListOutput]


-class
+class FeaturesOutputV2(FeatureStoreBaseModel):
+    features: list[QualifiedFeature]
+    feature_set_digests: list[FeatureSetDigestOutputV2]
+
+
+class EntityListOutput(FeatureStoreBaseModel):
     entity: Entity
     feature_set_digest: FeatureSetDigestOutput


-class
+class EntitiesOutputV2(FeatureStoreBaseModel):
+    entities: list[QualifiedEntity]
+    feature_set_digests: list[FeatureSetDigestOutputV2]
+
+
+class EntitiesOutput(FeatureStoreBaseModel):
     entities: list[EntityListOutput]


-class FeatureVector(
-    kind: ObjectKind = Field(ObjectKind.feature_vector, const=True)
+class FeatureVector(FeatureStoreBaseModel):
+    kind: ObjectKind = pydantic.Field(ObjectKind.feature_vector, const=True)
     metadata: ObjectMetadata
     spec: ObjectSpec
     status: ObjectStatus
@@ -139,39 +189,39 @@ class FeatureVectorRecord(ObjectRecord):
     pass


-class FeatureVectorsOutput(
+class FeatureVectorsOutput(FeatureStoreBaseModel):
     feature_vectors: list[FeatureVector]


-class FeatureVectorsTagsOutput(
+class FeatureVectorsTagsOutput(FeatureStoreBaseModel):
     tags: list[str] = []


-class DataSource(
+class DataSource(FeatureStoreBaseModel):
     kind: str
     name: str
     path: str

     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow


-class DataTarget(
+class DataTarget(FeatureStoreBaseModel):
     kind: str
     name: str
     path: Optional[str]

     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow


-class FeatureSetIngestInput(
+class FeatureSetIngestInput(FeatureStoreBaseModel):
     source: Optional[DataSource]
     targets: Optional[list[DataTarget]]
     infer_options: Optional[int]
     credentials: Credentials = Credentials()


-class FeatureSetIngestOutput(
+class FeatureSetIngestOutput(FeatureStoreBaseModel):
     feature_set: FeatureSet
     run_object: dict
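All of the feature-store schema models now inherit the new FeatureStoreBaseModel, whose only job is to set pydantic's copy_on_model_validation = "none" for these classes. Under pydantic 1.x, that setting makes a parent model reuse an already-validated nested model instance instead of copying it during validation. A minimal sketch of the behaviour, assuming pydantic 1.10 semantics as described in the documentation linked in the diff:

import pydantic  # pydantic 1.x


class Inner(pydantic.BaseModel):
    value: int

    class Config:
        # "none": the nested instance is neither copied nor re-validated by the parent
        copy_on_model_validation = "none"


class Outer(pydantic.BaseModel):
    inner: Inner


inner = Inner(value=1)
outer = Outer(inner=inner)

# With the default ("shallow") config the parent would typically hold a copy of `inner`;
# with "none" it holds the very same object.
assert outer.inner is inner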
mlrun/config.py
CHANGED
@@ -229,6 +229,9 @@ default_config = {
                 "executing": "24h",
             }
         },
+        # When the module is reloaded, the maximum depth recursion configuration for the recursive reload
+        # function is used to prevent infinite loop
+        "reload_max_recursion_depth": 100,
     },
     "databricks": {
         "artifact_directory_path": "/mlrun_databricks_runtime/artifacts_dictionaries"
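The new reload_max_recursion_depth setting caps how deep mlrun's recursive module reload may descend (the reload helper itself lives in mlrun/utils/helpers.py, which also changes in this release). The following is only a hypothetical sketch of what a depth-limited recursive reload looks like; the function name and signature are invented for illustration and are not mlrun's actual implementation.

import importlib
import types


def reload_recursively(module: types.ModuleType, max_depth: int = 100, _depth: int = 0, _seen: set = None):
    # Hypothetical illustration of a depth-capped recursive reload (not mlrun's code).
    _seen = _seen if _seen is not None else set()
    if _depth >= max_depth or module.__name__ in _seen:
        # The depth cap plays the role of reload_max_recursion_depth: it stops
        # cyclic imports from turning the reload into infinite recursion.
        return
    _seen.add(module.__name__)
    for attribute in vars(module).copy().values():
        if isinstance(attribute, types.ModuleType):
            reload_recursively(attribute, max_depth, _depth + 1, _seen)
    importlib.reload(module)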
mlrun/db/base.py
CHANGED
mlrun/db/httpdb.py
CHANGED
@@ -3320,6 +3320,7 @@ class HTTPRunDB(RunDBInterface):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
     ) -> None:
         """
         Deploy model monitoring application controller, writer and stream functions.
@@ -3329,13 +3330,14 @@ class HTTPRunDB(RunDBInterface):
         The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
         is detected. It processes the new events into statistics that are then written to statistics databases.

-        :param project:
-        :param base_period:
-
-        :param image:
-
-
+        :param project: Project name.
+        :param base_period: The time period in minutes in which the model monitoring controller
+                            function triggers. By default, the base period is 10 minutes.
+        :param image: The image of the model monitoring controller, writer & monitoring
+                      stream functions, which are real time nuclio functions.
+                      By default, the image is mlrun/mlrun.
         :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
+        :param rebuild_images: If true, force rebuild of model monitoring infrastructure images.
         """
         self.api_call(
             method=mlrun.common.types.HTTPMethod.POST,
@@ -3344,6 +3346,7 @@ class HTTPRunDB(RunDBInterface):
                 "base_period": base_period,
                 "image": image,
                 "deploy_histogram_data_drift_app": deploy_histogram_data_drift_app,
+                "rebuild_images": rebuild_images,
             },
         )
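A hedged usage sketch of the extended client call; the project name is a placeholder and the argument values simply echo the defaults shown in the signature above.

import mlrun

# Placeholder project name; arguments mirror the signature shown in the diff above.
db = mlrun.get_run_db()
db.enable_model_monitoring(
    project="my-project",
    base_period=10,                          # controller trigger period, in minutes
    image="mlrun/mlrun",
    deploy_histogram_data_drift_app=True,
    rebuild_images=True,                     # new flag in this diff: force an image rebuild
)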
mlrun/db/nopdb.py
CHANGED
mlrun/errors.py
CHANGED
@@ -92,9 +92,7 @@ def raise_for_status(
     try:
         response.raise_for_status()
     except (requests.HTTPError, aiohttp.ClientResponseError) as exc:
-        error_message = err_to_str(exc)
-        if message:
-            error_message = f"{error_message}: {message}"
+        error_message = err_to_str(exc) if not message else message
         status_code = (
             response.status_code
             if hasattr(response, "status_code")
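The net effect of this change: an explicit message now replaces the underlying exception text rather than being appended to it. A small sketch of the old vs. new formatting, using str() in place of mlrun's err_to_str helper:

exc = Exception("404 Client Error: Not Found")


def old_error_message(message: str = "") -> str:
    error_message = str(exc)
    if message:
        error_message = f"{error_message}: {message}"
    return error_message


def new_error_message(message: str = "") -> str:
    return str(exc) if not message else message


assert old_error_message("model endpoint not found") == "404 Client Error: Not Found: model endpoint not found"
assert new_error_message("model endpoint not found") == "model endpoint not found"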
mlrun/execution.py
CHANGED
@@ -111,6 +111,7 @@ class MLClientCtx:

         self._project_object = None
         self._allow_empty_resources = None
+        self._reset_on_run = None

     def __enter__(self):
         return self
@@ -389,6 +390,7 @@ class MLClientCtx:
         self._state_thresholds = spec.get(
             "state_thresholds", self._state_thresholds
         )
+        self._reset_on_run = spec.get("reset_on_run", self._reset_on_run)

         self._init_dbs(rundb)
mlrun/launcher/local.py
CHANGED
@@ -69,6 +69,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
     ) -> "mlrun.run.RunObject":
         # do not allow local function to be scheduled
         if self._is_run_local and schedule is not None:
@@ -88,6 +89,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
             name=name,
             workdir=workdir,
             handler=handler,
+            reset_on_run=reset_on_run,
         )

         # sanity check
@@ -212,6 +214,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
         name: Optional[str] = "",
         workdir: Optional[str] = "",
         handler: Optional[str] = None,
+        reset_on_run: Optional[bool] = None,
     ):
         project = project or runtime.metadata.project
         function_name = name or runtime.metadata.name
@@ -250,6 +253,7 @@ class ClientLocalLauncher(launcher.ClientBaseLauncher):
             fn.spec.build = runtime.spec.build

         run.spec.handler = handler
+        run.spec.reset_on_run = reset_on_run
         return fn

     @staticmethod
mlrun/launcher/remote.py
CHANGED
@@ -59,6 +59,7 @@ class ClientRemoteLauncher(launcher.ClientBaseLauncher):
         notifications: Optional[list[mlrun.model.Notification]] = None,
         returns: Optional[list[Union[str, dict[str, str]]]] = None,
         state_thresholds: Optional[dict[str, int]] = None,
+        reset_on_run: Optional[bool] = None,
     ) -> "mlrun.run.RunObject":
         self.enrich_runtime(runtime, project)
         run = self._create_run_object(task)
mlrun/model.py
CHANGED
@@ -872,6 +872,7 @@ class RunSpec(ModelObj):
         returns=None,
         notifications=None,
         state_thresholds=None,
+        reset_on_run=None,
     ):
         # A dictionary of parsing configurations that will be read from the inputs the user set. The keys are the inputs
         # keys (parameter names) and the values are the type hint given in the input keys after the colon.
@@ -908,6 +909,7 @@ class RunSpec(ModelObj):
         self.allow_empty_resources = allow_empty_resources
         self._notifications = notifications or []
         self.state_thresholds = state_thresholds or {}
+        self.reset_on_run = reset_on_run

     def _serialize_field(
         self, struct: dict, field_name: str = None, strip: bool = False
mlrun/model_monitoring/api.py
CHANGED
@@ -645,6 +645,7 @@ def _create_model_monitoring_function_base(
         app_step = prepare_step.to(class_name=application_class, **application_kwargs)
     else:
         app_step = prepare_step.to(class_name=application_class)
+    app_step.__class__ = mlrun.serving.MonitoringApplicationStep
     app_step.to(
         class_name="mlrun.model_monitoring.applications._application_steps._PushToMonitoringWriter",
         name="PushToMonitoringWriter",
mlrun/model_monitoring/applications/base.py
CHANGED
@@ -21,10 +21,10 @@ import pandas as pd
 import mlrun
 import mlrun.model_monitoring.applications.context as mm_context
 import mlrun.model_monitoring.applications.results as mm_results
-from mlrun.serving.utils import StepToDict
+from mlrun.serving.utils import MonitoringApplicationToDict


-class ModelMonitoringApplicationBaseV2(StepToDict, ABC):
+class ModelMonitoringApplicationBaseV2(MonitoringApplicationToDict, ABC):
     """
     A base class for a model monitoring application.
     Inherit from this class to create a custom model monitoring application.
@@ -112,7 +112,7 @@ class ModelMonitoringApplicationBaseV2(StepToDict, ABC):
         raise NotImplementedError


-class ModelMonitoringApplicationBase(
+class ModelMonitoringApplicationBase(MonitoringApplicationToDict, ABC):
     """
     A base class for a model monitoring application.
     Inherit from this class to create a custom model monitoring application.
mlrun/model_monitoring/db/stores/__init__.py
CHANGED
@@ -31,17 +31,12 @@ class ObjectStoreFactory(enum.Enum):
     def to_object_store(
         self,
         project: str,
-
-        secret_provider: typing.Callable = None,
+        **kwargs,
     ) -> StoreBase:
         """
         Return a StoreBase object based on the provided enum value.

         :param project: The name of the project.
-        :param access_key: Access key with permission to the DB table. Note that if access key is None
-                           and the endpoint target is from type KV then the access key will be
-                           retrieved from the environment variable.
-        :param secret_provider: An optional secret provider to get the connection string secret.

         :return: `StoreBase` object.

@@ -50,10 +45,7 @@ class ObjectStoreFactory(enum.Enum):
         if self == self.v3io_nosql:
             from mlrun.model_monitoring.db.stores.v3io_kv.kv_store import KVStoreBase

-
-            access_key = access_key or mlrun.mlconf.get_v3io_access_key()
-
-            return KVStoreBase(project=project, access_key=access_key)
+            return KVStoreBase(project=project)

         # Assuming SQL store target if store type is not KV.
         # Update these lines once there are more than two store target types.
@@ -62,7 +54,7 @@ class ObjectStoreFactory(enum.Enum):

         return SQLStoreBase(
             project=project,
-
+            **kwargs,
         )

     @classmethod
@@ -79,7 +71,7 @@ class ObjectStoreFactory(enum.Enum):
 def get_model_endpoint_store(
     project: str,
     access_key: str = None,
-    secret_provider: typing.Callable = None,
+    secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
 ) -> StoreBase:
     # Leaving here for backwards compatibility
     warnings.warn(
@@ -95,24 +87,38 @@ def get_model_endpoint_store(

 def get_store_object(
     project: str,
-
-
+    secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
+    **kwargs,
 ) -> StoreBase:
     """
-
+    Generate a store object. If a connection string is provided, the store type will be updated according to the
+    connection string. Currently, the supported store types are SQL and v3io-nosql.

     :param project: The name of the project.
-    :param access_key: Access key with permission to the DB table.
     :param secret_provider: An optional secret provider to get the connection string secret.

-    :return: `StoreBase` object. Using this object, the user can apply different operations
-
+    :return: `StoreBase` object. Using this object, the user can apply different operations such as write, update, get
+             and delete a model endpoint record.
     """

+    store_connection_string = mlrun.model_monitoring.helpers.get_connection_string(
+        secret_provider=secret_provider
+    )
+
+    if store_connection_string and (
+        store_connection_string.startswith("mysql")
+        or store_connection_string.startswith("sqlite")
+    ):
+        store_type = mlrun.common.schemas.model_monitoring.ModelEndpointTarget.SQL
+        kwargs["store_connection_string"] = store_connection_string
+    else:
+        # Set the default store type if no connection has been set
+        store_type = mlrun.mlconf.model_endpoint_monitoring.store_type
+
     # Get store type value from ObjectStoreFactory enum class
-
+    store_type_fact = ObjectStoreFactory(store_type)

     # Convert into store target object
-    return
-        project=project,
+    return store_type_fact.to_object_store(
+        project=project, secret_provider=secret_provider, **kwargs
     )
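The selection logic added to get_store_object keys off the connection-string scheme: a mysql/sqlite prefix selects the SQL store and forwards the connection string, anything else falls back to the configured default store type. A standalone sketch of that dispatch follows; the function and the literal store-type names used here are placeholders, not mlrun's identifiers.

from typing import Optional

# Placeholder store-type names for illustration only.
SQL_STORE = "SQL"
DEFAULT_STORE = "v3io-nosql"


def select_store_type(connection_string: Optional[str], default: str = DEFAULT_STORE) -> str:
    # A mysql/sqlite DSN means a SQL-backed model endpoint store
    if connection_string and connection_string.startswith(("mysql", "sqlite")):
        return SQL_STORE
    # Otherwise keep the configured default store type
    return default


assert select_store_type("mysql+pymysql://user:pass@db:3306/monitoring") == "SQL"
assert select_store_type("sqlite:////tmp/monitoring.db") == "SQL"
assert select_store_type(None) == "v3io-nosql"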
mlrun/model_monitoring/db/stores/sqldb/sql_store.py
CHANGED
@@ -25,14 +25,15 @@ from sqlalchemy.sql.elements import BinaryExpression

 import mlrun.common.model_monitoring.helpers
 import mlrun.common.schemas.model_monitoring as mm_schemas
-import mlrun.model_monitoring.db
 import mlrun.model_monitoring.db.stores.sqldb.models
 import mlrun.model_monitoring.helpers
 from mlrun.common.db.sql_session import create_session, get_engine
+from mlrun.model_monitoring.db import StoreBase
 from mlrun.utils import datetime_now, logger


-class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
+class SQLStoreBase(StoreBase):
+    type: typing.ClassVar[str] = mm_schemas.ModelEndpointTarget.SQL
     """
     Handles the DB operations when the DB target is from type SQL. For the SQL operations, we use SQLAlchemy, a Python
     SQL toolkit that handles the communication with the database. When using SQL for storing the model monitoring
@@ -44,23 +45,22 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
     def __init__(
         self,
         project: str,
-
+        **kwargs,
     ):
         """
         Initialize SQL store target object.

         :param project: The name of the project.
-        :param secret_provider: An optional secret provider to get the connection string secret.
         """

         super().__init__(project=project)

-
-        mlrun.
-
+        if "store_connection_string" not in kwargs:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "connection_string is a required parameter for SQLStoreBase."
             )
-        )

+        self._sql_connection_string = kwargs.get("store_connection_string")
         self._engine = get_engine(dsn=self._sql_connection_string)

     def _init_tables(self):
|