mlrun 1.8.0rc36__py3-none-any.whl → 1.8.0rc38__py3-none-any.whl
This diff compares the contents of publicly released package versions as published to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.
- mlrun/api/schemas/__init__.py +0 -3
- mlrun/common/model_monitoring/helpers.py +0 -13
- mlrun/common/schemas/__init__.py +1 -1
- mlrun/common/schemas/model_monitoring/__init__.py +0 -5
- mlrun/common/schemas/model_monitoring/constants.py +0 -22
- mlrun/common/schemas/model_monitoring/model_endpoints.py +0 -6
- mlrun/datastore/__init__.py +57 -16
- mlrun/datastore/datastore_profile.py +10 -7
- mlrun/datastore/sources.py +6 -17
- mlrun/datastore/storeytargets.py +29 -15
- mlrun/datastore/utils.py +73 -0
- mlrun/db/base.py +1 -0
- mlrun/db/httpdb.py +5 -0
- mlrun/db/nopdb.py +1 -0
- mlrun/feature_store/__init__.py +2 -0
- mlrun/feature_store/api.py +77 -0
- mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py +54 -0
- mlrun/model_monitoring/helpers.py +15 -27
- mlrun/model_monitoring/stream_processing.py +7 -34
- mlrun/serving/states.py +30 -1
- mlrun/utils/version/version.json +2 -2
- {mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/METADATA +1 -1
- {mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/RECORD +27 -27
- {mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/WHEEL +1 -1
- {mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/LICENSE +0 -0
- {mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/entry_points.txt +0 -0
- {mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/top_level.txt +0 -0
mlrun/api/schemas/__init__.py
CHANGED

```diff
@@ -208,9 +208,6 @@ ModelEndpointList = DeprecationHelper(mlrun.common.schemas.ModelEndpointList)
 ModelEndpointMetadata = DeprecationHelper(mlrun.common.schemas.ModelEndpointMetadata)
 ModelEndpointSpec = DeprecationHelper(mlrun.common.schemas.ModelEndpointSpec)
 ModelEndpointStatus = DeprecationHelper(mlrun.common.schemas.ModelEndpointStatus)
-ModelMonitoringStoreKinds = DeprecationHelper(
-    mlrun.common.schemas.ModelMonitoringStoreKinds
-)
 NotificationSeverity = DeprecationHelper(mlrun.common.schemas.NotificationSeverity)
 NotificationStatus = DeprecationHelper(mlrun.common.schemas.NotificationStatus)
 ObjectKind = DeprecationHelper(mlrun.common.schemas.ObjectKind)
```
mlrun/common/model_monitoring/helpers.py CHANGED

```diff
@@ -50,19 +50,6 @@ def get_kafka_topic(project: str, function_name: typing.Optional[str] = None) -> str:
     )
 
 
-def parse_monitoring_stream_path(
-    stream_uri: str, project: str, function_name: typing.Optional[str] = None
-) -> str:
-    if stream_uri.startswith("kafka://"):
-        if "?topic" in stream_uri:
-            raise mlrun.errors.MLRunValueError("Custom kafka topic is not allowed")
-        # Add topic to stream kafka uri
-        topic = get_kafka_topic(project=project, function_name=function_name)
-        stream_uri += f"?topic={topic}"
-
-    return stream_uri
-
-
 def _get_counts(hist: Histogram) -> BinCounts:
     """Return the histogram counts"""
     return BinCounts(hist[0])
```
mlrun/common/schemas/__init__.py
CHANGED

```diff
@@ -139,6 +139,7 @@ from .model_monitoring import (
     Features,
     FeatureSetFeatures,
     FeatureValues,
+    FileTargetKind,
     GrafanaColumn,
     GrafanaNumberColumn,
     GrafanaStringColumn,
@@ -151,7 +152,6 @@ from .model_monitoring import (
     ModelEndpointSpec,
     ModelEndpointStatus,
     ModelMonitoringMode,
-    ModelMonitoringStoreKinds,
     MonitoringFunctionNames,
     TSDBTarget,
     V3IOTSDBTables,
```
mlrun/common/schemas/model_monitoring/__init__.py CHANGED

```diff
@@ -14,9 +14,7 @@
 
 from .constants import (
     INTERSECT_DICT_KEYS,
-    V3IO_MODEL_MONITORING_DB,
     ApplicationEvent,
-    ControllerPolicy,
     DriftStatus,
     EndpointType,
     EndpointUID,
@@ -30,10 +28,7 @@ from .constants import (
     ModelEndpointCreationStrategy,
     ModelEndpointMonitoringMetricType,
     ModelEndpointSchema,
-    ModelEndpointTarget,
-    ModelEndpointTargetSchemas,
     ModelMonitoringMode,
-    ModelMonitoringStoreKinds,
     MonitoringFunctionNames,
     PredictionsQueryConstants,
     ProjectSecretKeys,
```
mlrun/common/schemas/model_monitoring/constants.py CHANGED

```diff
@@ -240,11 +240,6 @@ class EventKeyMetrics:
     REAL_TIME = "real_time"
 
 
-class ModelEndpointTarget(MonitoringStrEnum):
-    V3IO_NOSQL = "v3io-nosql"
-    SQL = "sql"
-
-
 class TSDBTarget(MonitoringStrEnum):
     V3IO_TSDB = "v3io-tsdb"
     TDEngine = "tdengine"
@@ -269,17 +264,6 @@ class GetEventsFormat(MonitoringStrEnum):
     INTERSECTION = "intersection"
 
 
-class ModelEndpointTargetSchemas(MonitoringStrEnum):
-    V3IO = "v3io"
-    MYSQL = "mysql"
-    SQLITE = "sqlite"
-
-
-class ModelMonitoringStoreKinds:
-    ENDPOINTS = "endpoints"
-    EVENTS = "events"
-
-
 class FileTargetKind:
     ENDPOINTS = "endpoints"
     EVENTS = "events"
@@ -429,10 +413,6 @@ class ModelMonitoringAppLabel:
         return f"{self.KEY}={self.VAL}"
 
 
-class ControllerPolicy:
-    BASE_PERIOD = "base_period"
-
-
 class HistogramDataDriftApplicationConstants:
     NAME = "histogram-data-drift"
     GENERAL_RESULT_NAME = "general_drift"
@@ -449,8 +429,6 @@ class SpecialApps:
 
 _RESERVED_FUNCTION_NAMES = MonitoringFunctionNames.list() + [SpecialApps.MLRUN_INFRA]
 
-V3IO_MODEL_MONITORING_DB = "v3io"
-
 
 class ModelEndpointMonitoringMetricType(StrEnum):
     RESULT = "result"
```
mlrun/common/schemas/model_monitoring/model_endpoints.py CHANGED

```diff
@@ -36,12 +36,6 @@ from .constants import (
 Model = TypeVar("Model", bound=BaseModel)
 
 
-class ModelMonitoringStoreKinds:
-    # TODO: do changes in examples & demos In 1.5.0 remove
-    ENDPOINTS = "endpoints"
-    EVENTS = "events"
-
-
 class Histogram(BaseModel):
     buckets: list[float]
     counts: list[int]
```
mlrun/datastore/__init__.py
CHANGED

```diff
@@ -34,9 +34,17 @@ __all__ = [
     "VectorStoreCollection",
 ]
 
+from urllib.parse import urlparse
+
 import fsspec
+from mergedeep import merge
 
 import mlrun.datastore.wasbfs
+from mlrun.datastore.datastore_profile import (
+    DatastoreProfileKafkaSource,
+    DatastoreProfileKafkaTarget,
+    DatastoreProfileV3io,
+)
 from mlrun.platforms.iguazio import (
     HTTPOutputStream,
     KafkaOutputStream,
@@ -106,23 +114,56 @@ def get_stream_pusher(stream_path: str, **kwargs):
 
     :param stream_path: path/url of stream
     """
-    … (15 lines elided in the source diff)
+    if stream_path.startswith("ds://"):
+        datastore_profile = mlrun.datastore.datastore_profile.datastore_profile_read(
+            stream_path
+        )
+        if isinstance(
+            datastore_profile,
+            (DatastoreProfileKafkaSource, DatastoreProfileKafkaTarget),
+        ):
+            attributes = datastore_profile.attributes()
+            brokers = attributes.pop("brokers", None)
+            # Override the topic with the one in the url (if any)
+            parsed_url = urlparse(stream_path)
+            topic = (
+                parsed_url.path.strip("/")
+                if parsed_url.path
+                else datastore_profile.get_topic()
+            )
+            producer_options = mlrun.datastore.utils.KafkaParameters(
+                attributes
+            ).producer()
+            return KafkaOutputStream(topic, brokers, producer_options=producer_options)
+
+        elif isinstance(datastore_profile, DatastoreProfileV3io):
+            parsed_url = urlparse(stream_path)
+            stream_path = datastore_profile.url(parsed_url.path)
+            endpoint, stream_path = parse_path(stream_path)
+            return OutputStream(stream_path, endpoint=endpoint, **kwargs)
+        else:
+            raise ValueError(
+                f"Unsupported datastore profile type: {type(datastore_profile)}"
+            )
     else:
-        … (1 line elided in the source diff)
+        kafka_brokers = get_kafka_brokers_from_dict(kwargs)
+        if stream_path.startswith("kafka://") or kafka_brokers:
+            topic, brokers = parse_kafka_url(stream_path, kafka_brokers)
+            return KafkaOutputStream(
+                topic, brokers, kwargs.get("kafka_producer_options")
+            )
+        elif stream_path.startswith("http://") or stream_path.startswith("https://"):
+            return HTTPOutputStream(stream_path=stream_path)
+        elif "://" not in stream_path:
+            return OutputStream(stream_path, **kwargs)
+        elif stream_path.startswith("v3io"):
+            endpoint, stream_path = parse_path(stream_path)
+            endpoint = kwargs.pop("endpoint", None) or endpoint
+            return OutputStream(stream_path, endpoint=endpoint, **kwargs)
+        elif stream_path.startswith("dummy://"):
+            return _DummyStream(**kwargs)
+        else:
+            raise ValueError(f"unsupported stream path {stream_path}")
 
 
 class _DummyStream:
```
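A hedged usage sketch of the new `ds://` dispatch in `get_stream_pusher`: the profile name, broker address, and topics below are placeholders, and the profile is registered with mlrun's existing `register_temporary_client_datastore_profile` helper.

```python
from mlrun.datastore import get_stream_pusher
from mlrun.datastore.datastore_profile import (
    DatastoreProfileKafkaTarget,
    register_temporary_client_datastore_profile,
)

# Register a Kafka target profile (values are illustrative only)
profile = DatastoreProfileKafkaTarget(
    name="my-kafka", brokers="broker:9092", topic="events"
)
register_temporary_client_datastore_profile(profile)

# A path segment after the profile name overrides the profile's topic,
# per the urlparse-based override in the diff above
pusher = get_stream_pusher("ds://my-kafka/override-topic")
```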
mlrun/datastore/datastore_profile.py CHANGED

```diff
@@ -171,6 +171,9 @@ class DatastoreProfileKafkaTarget(DatastoreProfile):
                 FutureWarning,
             )
 
+    def get_topic(self) -> typing.Optional[str]:
+        return self.topic
+
     def attributes(self):
         attributes = {"brokers": self.brokers or self.bootstrap_servers}
         if self.kwargs_public:
@@ -193,6 +196,10 @@ class DatastoreProfileKafkaSource(DatastoreProfile):
     kwargs_public: typing.Optional[dict]
     kwargs_private: typing.Optional[dict]
 
+    def get_topic(self) -> typing.Optional[str]:
+        topics = [self.topics] if isinstance(self.topics, str) else self.topics
+        return topics[0] if topics else None
+
     def attributes(self) -> dict[str, typing.Any]:
         attributes = {}
         if self.kwargs_public:
@@ -209,13 +216,9 @@ class DatastoreProfileKafkaSource(DatastoreProfile):
         attributes["initial_offset"] = self.initial_offset
         if self.partitions is not None:
             attributes["partitions"] = self.partitions
-        … (3 lines elided in the source diff)
-            sasl["user"] = self.sasl_user
-            sasl["password"] = self.sasl_pass
-            sasl["mechanism"] = "PLAIN"
-        if sasl:
+        if sasl := mlrun.datastore.utils.KafkaParameters(attributes).sasl(
+            usr=self.sasl_user, pwd=self.sasl_pass
+        ):
             attributes["sasl"] = sasl
         return attributes
 
```
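A small sketch of the new `get_topic()` accessors, based only on the code in the diff above; the profile values are illustrative.

```python
from mlrun.datastore.datastore_profile import (
    DatastoreProfileKafkaSource,
    DatastoreProfileKafkaTarget,
)

target = DatastoreProfileKafkaTarget(name="t", brokers="b:9092", topic="events")
assert target.get_topic() == "events"

# For a source, `topics` may be a string or a list; the first entry wins
source = DatastoreProfileKafkaSource(name="s", brokers=["b:9092"], topics=["raw"])
assert source.get_topic() == "raw"
```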
mlrun/datastore/sources.py
CHANGED

```diff
@@ -1100,13 +1100,9 @@ class KafkaSource(OnlineSource):
         attributes["initial_offset"] = initial_offset
         if partitions is not None:
             attributes["partitions"] = partitions
-        … (3 lines elided in the source diff)
-            sasl["user"] = sasl_user
-            sasl["password"] = sasl_pass
-            sasl["mechanism"] = "PLAIN"
-        if sasl:
+        if sasl := mlrun.datastore.utils.KafkaParameters(attributes).sasl(
+            usr=sasl_user, pwd=sasl_pass
+        ):
             attributes["sasl"] = sasl
         super().__init__(attributes=attributes, **kwargs)
 
@@ -1207,16 +1203,9 @@ class KafkaSource(OnlineSource):
         ]
 
         kafka_admin_kwargs = {}
-        … (3 lines elided in the source diff)
-                {
-                    "security_protocol": "SASL_PLAINTEXT",
-                    "sasl_mechanism": sasl["mechanism"],
-                    "sasl_plain_username": sasl["user"],
-                    "sasl_plain_password": sasl["password"],
-                }
-            )
+        kafka_admin_kwargs = mlrun.datastore.utils.KafkaParameters(
+            self.attributes
+        ).admin()
 
         kafka_admin = KafkaAdminClient(bootstrap_servers=brokers, **kafka_admin_kwargs)
         try:
```
mlrun/datastore/storeytargets.py
CHANGED

```diff
@@ -11,6 +11,8 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+from urllib.parse import urlparse
+
 import storey
 from mergedeep import merge
 from storey import V3ioDriver
@@ -18,6 +20,12 @@ from storey import V3ioDriver
 import mlrun
 import mlrun.model_monitoring.helpers
 from mlrun.datastore.base import DataStore
+from mlrun.datastore.datastore_profile import (
+    DatastoreProfileKafkaSource,
+    DatastoreProfileKafkaTarget,
+    TDEngineDatastoreProfile,
+    datastore_profile_read,
+)
 
 from ..platforms.iguazio import parse_path
 from .utils import (
@@ -44,13 +52,8 @@ def get_url_and_storage_options(path, external_storage_options=None):
 class TDEngineStoreyTarget(storey.TDEngineTarget):
     def __init__(self, *args, url: str, **kwargs):
         if url.startswith("ds://"):
-            datastore_profile = (
-                mlrun.datastore.datastore_profile.datastore_profile_read(url)
-            )
-            if not isinstance(
-                datastore_profile,
-                mlrun.datastore.datastore_profile.TDEngineDatastoreProfile,
-            ):
+            datastore_profile = datastore_profile_read(url)
+            if not isinstance(datastore_profile, TDEngineDatastoreProfile):
                 raise ValueError(
                     f"Unexpected datastore profile type:{datastore_profile.type}."
                     "Only TDEngineDatastoreProfile is supported"
@@ -126,16 +129,24 @@ class StreamStoreyTarget(storey.StreamTarget):
 class KafkaStoreyTarget(storey.KafkaTarget):
     def __init__(self, *args, **kwargs):
         path = kwargs.pop("path")
-        attributes = kwargs.pop("attributes", …
+        attributes = kwargs.pop("attributes", {})
         if path and path.startswith("ds://"):
-            datastore_profile = (
-                mlrun.datastore.datastore_profile.datastore_profile_read(path)
-            )
+            datastore_profile = datastore_profile_read(path)
+            if not isinstance(
+                datastore_profile,
+                (DatastoreProfileKafkaSource, DatastoreProfileKafkaTarget),
+            ):
+                raise mlrun.errors.MLRunInvalidArgumentError(
+                    f"Unsupported datastore profile type: {type(datastore_profile)}"
+                )
+
             attributes = merge(attributes, datastore_profile.attributes())
-            brokers = attributes.pop(
-                "brokers", attributes.pop("bootstrap_servers", None)
+            brokers = attributes.pop("brokers", None)
+            # Override the topic with the one in the url (if any)
+            parsed = urlparse(path)
+            topic = (
+                parsed.path.strip("/") if parsed.path else datastore_profile.get_topic()
             )
-            topic = datastore_profile.topic
         else:
             brokers = attributes.pop(
                 "brokers", attributes.pop("bootstrap_servers", None)
@@ -146,7 +157,10 @@ class KafkaStoreyTarget(storey.KafkaTarget):
             raise mlrun.errors.MLRunInvalidArgumentError("KafkaTarget requires a topic")
         kwargs["brokers"] = brokers
         kwargs["topic"] = topic
-        … (1 line elided in the source diff)
+
+        attributes = mlrun.datastore.utils.KafkaParameters(attributes).producer()
+
+        super().__init__(*args, **kwargs, producer_options=attributes)
 
 
 class NoSqlStoreyTarget(storey.NoSqlTarget):
```
mlrun/datastore/utils.py
CHANGED

```diff
@@ -222,3 +222,76 @@ def validate_additional_filters(additional_filters):
                 for sub_value in value:
                     if isinstance(sub_value, float) and math.isnan(sub_value):
                         raise mlrun.errors.MLRunInvalidArgumentError(nan_error_message)
+
+
+class KafkaParameters:
+    def __init__(self, kwargs: dict):
+        import kafka
+
+        self._kafka = kafka
+        self._kwargs = kwargs
+        self._client_configs = {
+            "consumer": self._kafka.KafkaConsumer.DEFAULT_CONFIG,
+            "producer": self._kafka.KafkaProducer.DEFAULT_CONFIG,
+            "admin": self._kafka.KafkaAdminClient.DEFAULT_CONFIG,
+        }
+        self._custom_attributes = {
+            "max_workers": "",
+            "brokers": "",
+            "topics": "",
+            "group": "",
+            "initial_offset": "",
+            "partitions": "",
+            "sasl": "",
+            "worker_allocation_mode": "",
+        }
+        self._validate_keys()
+
+    def _validate_keys(self) -> None:
+        reference_dicts = (
+            self._custom_attributes,
+            self._kafka.KafkaAdminClient.DEFAULT_CONFIG,
+            self._kafka.KafkaProducer.DEFAULT_CONFIG,
+            self._kafka.KafkaConsumer.DEFAULT_CONFIG,
+        )
+        for key in self._kwargs:
+            if all(key not in d for d in reference_dicts):
+                raise ValueError(
+                    f"Key '{key}' not found in any of the Kafka reference dictionaries"
+                )
+
+    def _get_config(self, client_type: str) -> dict:
+        res = {
+            k: self._kwargs[k]
+            for k in self._kwargs.keys() & self._client_configs[client_type].keys()
+        }
+        if sasl := self._kwargs.get("sasl"):
+            res |= {
+                "security_protocol": "SASL_PLAINTEXT",
+                "sasl_mechanism": sasl["mechanism"],
+                "sasl_plain_username": sasl["user"],
+                "sasl_plain_password": sasl["password"],
+            }
+        return res
+
+    def consumer(self) -> dict:
+        return self._get_config("consumer")
+
+    def producer(self) -> dict:
+        return self._get_config("producer")
+
+    def admin(self) -> dict:
+        return self._get_config("admin")
+
+    def sasl(
+        self, *, usr: typing.Optional[str] = None, pwd: typing.Optional[str] = None
+    ) -> dict:
+        usr = usr or self._kwargs.get("sasl_plain_username", None)
+        pwd = pwd or self._kwargs.get("sasl_plain_password", None)
+        res = self._kwargs.get("sasl", {})
+        if usr and pwd:
+            res["enable"] = True
+            res["user"] = usr
+            res["password"] = pwd
+            res["mechanism"] = self._kwargs.get("sasl_mechanism", "PLAIN")
+        return res
```
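A minimal usage sketch of the new helper; the attribute values are illustrative. `KafkaParameters` filters a mixed attribute dict down to the keys each kafka-python client accepts, rejects unknown keys, and folds an mlrun-style `sasl` dict into kafka-python's SASL settings.

```python
from mlrun.datastore.utils import KafkaParameters

params = KafkaParameters(
    {
        "brokers": "broker:9092",         # mlrun-only key, excluded from client configs
        "max_request_size": 5 * 1024**2,  # kafka-python producer key, passed through
        "sasl": {"user": "u", "password": "p", "mechanism": "PLAIN"},
    }
)

producer_options = params.producer()
# {"max_request_size": 5242880, "security_protocol": "SASL_PLAINTEXT",
#  "sasl_mechanism": "PLAIN", "sasl_plain_username": "u", "sasl_plain_password": "p"}

admin_options = params.admin()        # same SASL expansion, admin-specific key filtering
sasl = params.sasl(usr="u", pwd="p")  # {"enable": True, "user": "u", "password": "p", "mechanism": "PLAIN"}
```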
mlrun/db/base.py
CHANGED

```diff
@@ -257,6 +257,7 @@ class RunDBInterface(ABC):
         tag: Optional[str] = None,
         kind: Optional[str] = None,
         labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
+        states: Optional[list[mlrun.common.schemas.FunctionState]] = None,
         format_: mlrun.common.formatters.FunctionFormat = mlrun.common.formatters.FunctionFormat.full,
         since: Optional[datetime.datetime] = None,
         until: Optional[datetime.datetime] = None,
```
mlrun/db/httpdb.py
CHANGED

```diff
@@ -1497,6 +1497,7 @@ class HTTPRunDB(RunDBInterface):
         until: Optional[datetime] = None,
         kind: Optional[str] = None,
         format_: mlrun.common.formatters.FunctionFormat = mlrun.common.formatters.FunctionFormat.full,
+        states: typing.Optional[list[mlrun.common.schemas.FunctionState]] = None,
     ):
         """Retrieve a list of functions, filtered by specific criteria.
 
@@ -1514,6 +1515,7 @@ class HTTPRunDB(RunDBInterface):
         :param until: Return functions updated before this date (as datetime object).
         :param kind: Return only functions of a specific kind.
         :param format_: The format in which to return the functions. Default is 'full'.
+        :param states: Return only functions whose state is one of the provided states.
         :returns: List of function objects (as dictionary).
         """
         functions, _ = self._list_functions(
@@ -1525,6 +1527,7 @@ class HTTPRunDB(RunDBInterface):
             format_=format_,
             since=since,
             until=until,
+            states=states,
             return_all=True,
         )
         return functions
@@ -5135,6 +5138,7 @@ class HTTPRunDB(RunDBInterface):
         format_: Optional[str] = None,
         since: Optional[datetime] = None,
         until: Optional[datetime] = None,
+        states: typing.Optional[list[mlrun.common.schemas.FunctionState]] = None,
         page: Optional[int] = None,
         page_size: Optional[int] = None,
         page_token: Optional[str] = None,
@@ -5152,6 +5156,7 @@ class HTTPRunDB(RunDBInterface):
             "since": datetime_to_iso(since),
             "until": datetime_to_iso(until),
             "format": format_,
+            "state": states or None,
             "page": page,
             "page-size": page_size,
             "page-token": page_token,
```
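A short sketch of the new filter in use (the project name is hypothetical):

```python
import mlrun
from mlrun.common.schemas import FunctionState

db = mlrun.get_run_db()
ready_only = db.list_functions(
    project="my-project",
    states=[FunctionState.ready],
)
```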
mlrun/db/nopdb.py
CHANGED

```diff
@@ -274,6 +274,7 @@ class NopDB(RunDBInterface):
         tag: Optional[str] = None,
         kind: Optional[str] = None,
         labels: Optional[Union[str, dict[str, Optional[str]], list[str]]] = None,
+        states: Optional[list[mlrun.common.schemas.FunctionState]] = None,
         format_: mlrun.common.formatters.FunctionFormat = mlrun.common.formatters.FunctionFormat.full,
         since: Optional[datetime.datetime] = None,
         until: Optional[datetime.datetime] = None,
```
mlrun/feature_store/__init__.py
CHANGED

```diff
@@ -13,6 +13,7 @@
 # limitations under the License.
 
 __all__ = [
+    "ingest",
     "delete_feature_set",
     "delete_feature_vector",
     "get_feature_set",
@@ -35,6 +36,7 @@ from .api import (
     delete_feature_vector,
     get_feature_set,
     get_feature_vector,
+    ingest,
 )
 from .common import RunConfig
 from .feature_set import FeatureSet
```
mlrun/feature_store/api.py
CHANGED

```diff
@@ -244,6 +244,83 @@ def _get_namespace(run_config: RunConfig) -> dict[str, Any]:
     return get_caller_globals()
 
 
+def ingest(
+    mlrun_context: Union["mlrun.MlrunProject", "mlrun.MLClientCtx"],
+    featureset: Union[FeatureSet, str] = None,
+    source=None,
+    targets: Optional[list[DataTargetBase]] = None,
+    namespace=None,
+    return_df: bool = True,
+    infer_options: InferOptions = InferOptions.default(),
+    run_config: RunConfig = None,
+    spark_context=None,
+    overwrite=None,
+) -> Optional[pd.DataFrame]:
+    """Read a local DataFrame, file, URL, or source into the feature store.
+
+    Ingest reads from the source, runs the graph transformations, infers metadata and stats,
+    and writes the results to the default or specified targets.
+
+    When targets are not specified, data is stored in the configured default targets
+    (usually NoSQL for real-time and Parquet for offline).
+
+    The `run_config` parameter allows specifying the function and job configuration,
+    see: :py:class:`~mlrun.feature_store.RunConfig`
+
+    example::
+
+        stocks_set = FeatureSet("stocks", entities=[Entity("ticker")])
+        stocks = pd.read_csv("stocks.csv")
+        df = ingest(stocks_set, stocks, infer_options=fstore.InferOptions.default())
+
+        # for running as remote job
+        config = RunConfig(image="mlrun/mlrun")
+        df = ingest(stocks_set, stocks, run_config=config)
+
+        # specify source and targets
+        source = CSVSource("mycsv", path="measurements.csv")
+        targets = [CSVTarget("mycsv", path="./mycsv.csv")]
+        ingest(measurements, source, targets)
+
+    :param mlrun_context: mlrun context
+    :param featureset: feature set object or featureset.uri (the uri must be of a feature set that is in the DB;
+                       call `.save()` if it's not)
+    :param source: source dataframe or other sources (e.g. parquet source, see
+                   :py:class:`~mlrun.datastore.ParquetSource` and other classes in mlrun.datastore with suffix
+                   Source)
+    :param targets: optional list of data target objects
+    :param namespace: namespace or module containing graph classes
+    :param return_df: whether to return a dataframe with the graph results
+    :param infer_options: schema (for discovery of entities, features in featureset), index, stats,
+                          histogram and preview infer options (:py:class:`~mlrun.feature_store.InferOptions`)
+    :param run_config: function and/or run configuration for remote jobs,
+                       see :py:class:`~mlrun.feature_store.RunConfig`
+    :param spark_context: local spark session for spark ingestion, example for creating the spark context:
+                          `spark = SparkSession.builder.appName("Spark function").getOrCreate()`
+                          For remote spark ingestion, this should contain the remote spark service name
+    :param overwrite: delete the targets' data prior to ingestion
+                      (default: True for non-scheduled ingest - deletes the targets that are about to be ingested;
+                      False for scheduled ingest - does not delete the target)
+    :return: if return_df is True, a dataframe will be returned based on the graph
+    """
+    if not mlrun_context:
+        raise mlrun.errors.MLRunValueError(
+            "mlrun_context must be defined when calling ingest()"
+        )
+
+    return _ingest(
+        featureset,
+        source,
+        targets,
+        namespace,
+        return_df,
+        infer_options,
+        run_config,
+        mlrun_context,
+        spark_context,
+        overwrite,
+    )
+
+
 def _ingest(
     featureset: Union[FeatureSet, str] = None,
     source=None,
```
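A hedged end-to-end sketch of the re-exposed API; the project name and CSV file are placeholders. Unlike the old module-level `ingest`, the MLRun project (or run context) is now the first, mandatory argument.

```python
import pandas as pd
import mlrun
import mlrun.feature_store as fstore

project = mlrun.get_or_create_project("stocks-demo")  # hypothetical project
stocks_set = fstore.FeatureSet("stocks", entities=[fstore.Entity("ticker")])
stocks = pd.read_csv("stocks.csv")  # placeholder data file

# The context/project comes first; ingest() raises MLRunValueError without it
df = fstore.ingest(project, stocks_set, stocks)
```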
mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py CHANGED

```diff
@@ -187,6 +187,12 @@ class TDEngineConnector(TSDBConnector):
                 "Invalid 'endpoint_id' filter: must be a string or a list."
             )
 
+    def _drop_database_query(self) -> str:
+        return f"DROP DATABASE IF EXISTS {self.database};"
+
+    def _get_table_name_query(self) -> str:
+        return f"SELECT table_name FROM information_schema.ins_tables where db_name='{self.database}' LIMIT 1;"
+
     def apply_monitoring_stream_steps(self, graph, **kwarg):
         """
         Apply TSDB steps on the provided monitoring graph. Throughout these steps, the graph stores live data of
@@ -297,6 +303,54 @@ class TDEngineConnector(TSDBConnector):
             project=self.project,
         )
 
+        # Check if database is empty and if so, drop it
+        self._drop_database_if_empty()
+
+    def _drop_database_if_empty(self):
+        query_random_table_name = self._get_table_name_query()
+        drop_database = False
+        try:
+            table_name = self.connection.run(
+                query=query_random_table_name,
+                timeout=self._timeout,
+                retries=self._retries,
+            )
+            if len(table_name.data) == 0:
+                # no tables were found under the database
+                drop_database = True
+
+        except Exception as e:
+            logger.warning(
+                "Failed to query tables in the database. You may need to drop the database manually if it is empty.",
+                project=self.project,
+                error=mlrun.errors.err_to_str(e),
+            )
+
+        if drop_database:
+            logger.debug(
+                "Going to drop the TDEngine database",
+                project=self.project,
+                database=self.database,
+            )
+            drop_database_query = self._drop_database_query()
+            try:
+                self.connection.run(
+                    statements=drop_database_query,
+                    timeout=self._timeout,
+                    retries=self._retries,
+                )
+                logger.debug(
+                    "The TDEngine database has been successfully dropped",
+                    project=self.project,
+                    database=self.database,
+                )
+            except Exception as e:
+                logger.warning(
+                    "Failed to drop the database. You may need to drop it manually if it is empty.",
+                    project=self.project,
+                    error=mlrun.errors.err_to_str(e),
+                )
+
     def get_model_endpoint_real_time_metrics(
         self,
         endpoint_id: str,
```
mlrun/model_monitoring/helpers.py CHANGED

```diff
@@ -137,30 +137,27 @@ def get_stream_path(
         )
 
     if isinstance(profile, mlrun.datastore.datastore_profile.DatastoreProfileV3io):
-        stream_uri = …
+        stream_uri = mlrun.mlconf.get_model_monitoring_file_target_path(
+            project=project,
+            kind=mm_constants.FileTargetKind.STREAM,
+            target="online",
+            function_name=function_name,
+        )
+        return stream_uri.replace("v3io://", f"ds://{profile.name}")
+
     elif isinstance(
         profile, mlrun.datastore.datastore_profile.DatastoreProfileKafkaSource
     ):
-        … (2 lines elided in the source diff)
+        topic = mlrun.common.model_monitoring.helpers.get_kafka_topic(
+            project=project, function_name=function_name
+        )
+        return f"ds://{profile.name}/{topic}"
     else:
         raise mlrun.errors.MLRunValueError(
             f"Received an unexpected stream profile type: {type(profile)}\n"
             "Expects `DatastoreProfileV3io` or `DatastoreProfileKafkaSource`."
         )
 
-    if not stream_uri or stream_uri == "v3io":
-        stream_uri = mlrun.mlconf.get_model_monitoring_file_target_path(
-            project=project,
-            kind=mm_constants.FileTargetKind.STREAM,
-            target="online",
-            function_name=function_name,
-        )
-
-    return mlrun.common.model_monitoring.helpers.parse_monitoring_stream_path(
-        stream_uri=stream_uri, project=project, function_name=function_name
-    )
-
 
 def get_monitoring_parquet_path(
     project: "MlrunProject",
@@ -314,18 +311,9 @@ def _get_kafka_output_stream(
     topic = mlrun.common.model_monitoring.helpers.get_kafka_topic(
         project=project, function_name=function_name
     )
-
-    producer_options = …
-
-    sasl = profile_attributes["sasl"]
-    producer_options.update(
-        {
-            "security_protocol": "SASL_PLAINTEXT",
-            "sasl_mechanism": sasl["mechanism"],
-            "sasl_plain_username": sasl["user"],
-            "sasl_plain_password": sasl["password"],
-        },
-    )
+    attributes = kafka_profile.attributes()
+    producer_options = mlrun.datastore.utils.KafkaParameters(attributes).producer()
 
     return mlrun.platforms.iguazio.KafkaOutputStream(
         brokers=kafka_profile.brokers,
         topic=topic,
```
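Illustrative results of the reworked `get_stream_path`, which now always returns a `ds://`-scheme URI and no longer falls back to the removed `parse_monitoring_stream_path`. The profile names and the exact v3io path are assumptions for illustration.

```python
from mlrun.model_monitoring.helpers import get_stream_path

# With a DatastoreProfileV3io named "v3io-mm" registered for the project, the
# configured v3io:// stream target is rewritten to the profile scheme, e.g.
#   "ds://v3io-mm/<configured model-endpoints stream path>"
# With a DatastoreProfileKafkaSource named "kafka-mm" it becomes
#   "ds://kafka-mm/<topic from get_kafka_topic(project, function_name)>"
uri = get_stream_path(project="my-project")
```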
mlrun/model_monitoring/stream_processing.py CHANGED

```diff
@@ -31,7 +31,6 @@ from mlrun.common.schemas.model_monitoring.constants import (
     FileTargetKind,
     ProjectSecretKeys,
 )
-from mlrun.datastore import parse_kafka_url
 from mlrun.model_monitoring.db import TSDBConnector
 from mlrun.utils import logger
 
@@ -259,39 +258,13 @@ class EventStreamProcessor:
 
         # controller branch
         def apply_push_controller_stream(stream_uri: str):
-            … (7 lines elided in the source diff)
-                    after="ForwardNOP",
-                )
-            elif stream_uri.startswith("kafka://"):
-                topic, brokers = parse_kafka_url(stream_uri)
-                logger.info(
-                    "Controller stream uri for kafka",
-                    stream_uri=stream_uri,
-                    topic=topic,
-                    brokers=brokers,
-                )
-                if isinstance(brokers, list):
-                    path = f"kafka://{brokers[0]}/{topic}"
-                elif isinstance(brokers, str):
-                    path = f"kafka://{brokers}/{topic}"
-                else:
-                    raise mlrun.errors.MLRunInvalidArgumentError(
-                        "Brokers must be a list or str check controller stream uri"
-                    )
-                graph.add_step(
-                    ">>",
-                    "controller_stream_kafka",
-                    path=path,
-                    kafka_brokers=brokers,
-                    _sharding_func=ControllerEvent.ENDPOINT_ID,
-                    after="ForwardNOP",
-                )
+            graph.add_step(
+                ">>",
+                "controller_stream",
+                path=stream_uri,
+                sharding_func=ControllerEvent.ENDPOINT_ID,
+                after="ForwardNOP",
+            )
 
         apply_push_controller_stream(controller_stream_uri)
 
```
mlrun/serving/states.py
CHANGED

```diff
@@ -31,6 +31,13 @@ import storey.utils
 
 import mlrun
 import mlrun.common.schemas as schemas
+from mlrun.datastore.datastore_profile import (
+    DatastoreProfileKafkaSource,
+    DatastoreProfileKafkaTarget,
+    DatastoreProfileV3io,
+    datastore_profile_read,
+)
+from mlrun.datastore.storeytargets import KafkaStoreyTarget, StreamStoreyTarget
 from mlrun.utils import logger
 
 from ..config import config
@@ -1885,7 +1892,29 @@ def _init_async_objects(context, steps):
 
             kafka_brokers = get_kafka_brokers_from_dict(options, pop=True)
 
-            if stream_path.startswith("…
+            if stream_path and stream_path.startswith("ds://"):
+                datastore_profile = datastore_profile_read(stream_path)
+                if isinstance(
+                    datastore_profile,
+                    (DatastoreProfileKafkaTarget, DatastoreProfileKafkaSource),
+                ):
+                    step._async_object = KafkaStoreyTarget(
+                        path=stream_path,
+                        context=context,
+                        **options,
+                    )
+                elif isinstance(datastore_profile, DatastoreProfileV3io):
+                    step._async_object = StreamStoreyTarget(
+                        stream_path=stream_path,
+                        context=context,
+                        **options,
+                    )
+                else:
+                    raise mlrun.errors.MLRunValueError(
+                        f"Received an unexpected stream profile type: {type(datastore_profile)}\n"
+                        "Expects `DatastoreProfileV3io` or `DatastoreProfileKafkaSource`."
+                    )
+            elif stream_path.startswith("kafka://") or kafka_brokers:
                 topic, brokers = parse_kafka_url(stream_path, kafka_brokers)
 
                 kafka_producer_options = options.pop(
```
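A hedged sketch of wiring a serving-graph queue step to a profile-backed stream; the function name, handler module, and profile name are hypothetical. A `">>"` (queue) step whose path is a `ds://` URI is now dispatched by profile type: Kafka profiles get a `KafkaStoreyTarget`, a `DatastoreProfileV3io` gets a `StreamStoreyTarget`.

```python
import mlrun

fn = mlrun.new_function("events", kind="serving")
graph = fn.set_topology("flow")

# The queue step's ds:// path resolves through the registered profile
graph.to(name="prep", handler="my_module.prep").to(
    ">>", "out_stream", path="ds://my-kafka/predictions"
)
```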
mlrun/utils/version/version.json
CHANGED
(the 2-line version payload is not shown in the source diff)

{mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/RECORD CHANGED

```diff
@@ -12,7 +12,7 @@ mlrun/run.py,sha256=NScg8Acp62329jryOK5nldu2LYVkIZgSiEEg8IJrQwo,45124
 mlrun/secrets.py,sha256=dZPdkc_zzfscVQepOHUwmzFqnBavDCBXV9DQoH_eIYM,7800
 mlrun/alerts/__init__.py,sha256=0gtG1BG0DXxFrXegIkjbM1XEN4sP9ODo0ucXrNld1hU,601
 mlrun/alerts/alert.py,sha256=9kGTtV385Ax-aTm-450HzPwEek9e0c3O3Qln-jXjhFg,15948
-mlrun/api/schemas/__init__.py,sha256=…
+mlrun/api/schemas/__init__.py,sha256=tVAnpexDkfI0JWMJNlPSnVOzoV4xqIjWGSln9UkPS4I,13921
 mlrun/artifacts/__init__.py,sha256=ofC2extBCOC1wg1YtdTzWzH3eeG_f-sFBUkHjYtZJpk,1175
 mlrun/artifacts/base.py,sha256=nz2ZqC74JGfWN0M6_hOXXQj3bXSTxNp4eUgvWHVcdvY,29979
 mlrun/artifacts/dataset.py,sha256=QTot5vCgLHatlIWwNnKbWdZ8HHTxaZ7wk4gWQDoqQ2k,16655
@@ -37,9 +37,9 @@ mlrun/common/formatters/pipeline.py,sha256=oATD3znsuq3s7LipPnZivDPelTX0hJ0MFeeXO
 mlrun/common/formatters/project.py,sha256=0G4lhcTAsxQCxd40dKC4894cMH8nKt03BcGyp9wQO14,2102
 mlrun/common/formatters/run.py,sha256=Gcf9lVDqxPMNfWcPX0RJasjTC_N_U0yTBkQ02jOPJ7A,1062
 mlrun/common/model_monitoring/__init__.py,sha256=kXGBqhLN0rlAx0kTXhozGzFsIdSqW0uTSKMmsLgq_is,569
-mlrun/common/model_monitoring/helpers.py,sha256=…
+mlrun/common/model_monitoring/helpers.py,sha256=AkuHz4u318MEP4ebxmNWlNXh6HiNLrI5oF7QvJiJkYc,2707
 mlrun/common/runtimes/constants.py,sha256=PBpCtPixbKjP9aTo6Qqtui6FjWcXbFxhbSzduV4ttc4,12324
-mlrun/common/schemas/__init__.py,sha256=…
+mlrun/common/schemas/__init__.py,sha256=tNeYsylPAgG-3YI_Foka80alqm0JoCpXtuS_nDZzuaU,5324
 mlrun/common/schemas/alert.py,sha256=tRsjHEQTjCb-83GS0mprsu5junvqL4aQjWN2Rt_yAaM,10183
 mlrun/common/schemas/api_gateway.py,sha256=3a0QxECLmoDkD5IiOKtXJL-uiWB26Hg55WMA3nULYuI,7127
 mlrun/common/schemas/artifact.py,sha256=T-CdBIqgDUH-i8hx1Dp-Msr8v6UGwwp3d9j8rUzb9ZM,4249
@@ -72,21 +72,21 @@ mlrun/common/schemas/secret.py,sha256=CCxFYiPwJtDxwg2VVJH9nUG9cAZ2a34IjeuaWv-BYl
 mlrun/common/schemas/serving.py,sha256=81ZxlDHP1fm9VPmXZGkjZj2n6cVRmqEN478hsmvv5QA,744
 mlrun/common/schemas/tag.py,sha256=HRZi5QZ4vVGaCr2AMk9eJgcNiAIXmH4YDc8a4fvF770,893
 mlrun/common/schemas/workflow.py,sha256=6u9niXfXpV-_c2rZL97gFIdAnOfM5WK-OCbrM5Kk34s,2108
-mlrun/common/schemas/model_monitoring/__init__.py,sha256=…
-mlrun/common/schemas/model_monitoring/constants.py,sha256=…
+mlrun/common/schemas/model_monitoring/__init__.py,sha256=SxHG-GIdcTEuFxpKzkUdT9zKaU5Xqz9qF1uCwXvZ2z8,1709
+mlrun/common/schemas/model_monitoring/constants.py,sha256=dqCAZQM9dw_6WIlf61XYMHY7TFi_ujSkWt1r_8YlXmg,12207
 mlrun/common/schemas/model_monitoring/grafana.py,sha256=THQlLfPBevBksta8p5OaIsBaJtsNSXexLvHrDxOaVns,2095
-mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=…
+mlrun/common/schemas/model_monitoring/model_endpoints.py,sha256=LIRtQ_gD7n17pQbacynp7Os8HyFwqobTKIPH0lwqoUA,11663
 mlrun/data_types/__init__.py,sha256=unRo9GGwCmj0hBKBRsXJ2P4BzpQaddlQTvIrVQaKluI,984
 mlrun/data_types/data_types.py,sha256=0_oKLC6-sXL2_nnaDMP_HSXB3fD1nJAG4J2Jq6sGNNw,4998
 mlrun/data_types/infer.py,sha256=Ogp3rsENVkjU0GDaGa9J1vjGrvMxgzwbSEuG51nt61E,6477
 mlrun/data_types/spark.py,sha256=4fPpqjFCYeFgK_yHhUNM4rT-1Gw9YiXazyjTK7TtbTI,9626
 mlrun/data_types/to_pandas.py,sha256=KOy0FLXPJirsgH6szcC5BI6t70yVDCjuo6LmuYHNTuI,11429
-mlrun/datastore/__init__.py,sha256=…
+mlrun/datastore/__init__.py,sha256=81ulmQnRk1ENvwYOdetxqsLnr2gYVtW-KsvF-tY1Jxk,5783
 mlrun/datastore/alibaba_oss.py,sha256=k-OHVe08HjMewlkpsT657CbOiVFAfSq9_EqhCE-k86s,4940
 mlrun/datastore/azure_blob.py,sha256=SzAcHYSXkm8Zpopz2Ea-rWVClH0URocUazcNK04S9W0,12776
 mlrun/datastore/base.py,sha256=5WYsdmE_Nog2mflRfI5bOP9X5qW39xzM0TdVZXxhaHM,26308
 mlrun/datastore/datastore.py,sha256=frUYYP4i8ZmnY8GNXSgN_3x_exRgRPfxrCtAGEUifEU,9478
-mlrun/datastore/datastore_profile.py,sha256=…
+mlrun/datastore/datastore_profile.py,sha256=Ybh_75jnkwuyAXOIFc_Wm-3XxOpEovlQNZ2R0OfniBg,23860
 mlrun/datastore/dbfs_store.py,sha256=QkDRzwFnvm7CgEg4NuGxes6tBgKDyhX0CiBUvK8c9pk,6568
 mlrun/datastore/filestore.py,sha256=OcykjzhbUAZ6_Cb9bGAXRL2ngsOpxXSb4rR0lyogZtM,3773
 mlrun/datastore/google_cloud_storage.py,sha256=MnToY6irdhBZ8Wcapqnr1Yq2724LAh2uPO7MAtdWfUY,8716
@@ -95,25 +95,25 @@ mlrun/datastore/inmem.py,sha256=IsM83nn-3CqmGdLzim7i9ZmJwG6ZGhBZGN6_hszWZnE,2951
 mlrun/datastore/redis.py,sha256=QeNMkSz3zQXiXZhFUZcEtViqqbUysGJditbqe5M-J48,5682
 mlrun/datastore/s3.py,sha256=GjJnQLrigCqU9_ukRWv1pKhxfUtrMGFBUp6fmpPXUCY,9224
 mlrun/datastore/snowflake_utils.py,sha256=Wohvnlmq8j1d98RCaknll-iWdZZpSlCrKhUOEy0_-CA,1483
-mlrun/datastore/sources.py,sha256=…
+mlrun/datastore/sources.py,sha256=KQp1nNN7TcaewFm3It03H1R28uzlWGZDDHJyqiT--vw,49062
 mlrun/datastore/spark_udf.py,sha256=NnnB3DZxZb-rqpRy7b-NC7QWXuuqFn3XkBDc86tU4mQ,1498
 mlrun/datastore/spark_utils.py,sha256=_AsVoU5Ix_-W7Gyq8io8V-2GTk0m8THJNDP3WGGaWJY,2865
 mlrun/datastore/store_resources.py,sha256=PFOMrZ6KH6hBOb0PiO-cHx_kv0UpHu5P2t8_mrR-lS4,6842
-mlrun/datastore/storeytargets.py,sha256=…
+mlrun/datastore/storeytargets.py,sha256=PnlEMc4iD_0zhZZYZtEISPoGIgbsEtZNUvZ7a7ALlXo,6459
 mlrun/datastore/targets.py,sha256=QiEK-mHmUt2qnS2yaBSSKgk8CKqsGU-JoQ9kHoW1bvE,80759
-mlrun/datastore/utils.py,sha256=…
+mlrun/datastore/utils.py,sha256=L51jAKsIqnl5_Q_x4sI37TbGK2JCqWE9NiS5nWF3bts,10207
 mlrun/datastore/v3io.py,sha256=QSYBORRLcJTeM9mt0EaWzyLcdmzrPkqrF7k5uLTam5U,8209
 mlrun/datastore/vectorstore.py,sha256=k-yom5gfw20hnVG0Rg7aBEehuXwvAloZwn0cx0VGals,11708
 mlrun/datastore/wasbfs/__init__.py,sha256=s5Ul-0kAhYqFjKDR2X0O2vDGDbLQQduElb32Ev56Te4,1343
 mlrun/datastore/wasbfs/fs.py,sha256=ge8NK__5vTcFT-krI155_8RDUywQw4SIRX6BWATXy9Q,6299
 mlrun/db/__init__.py,sha256=WqJ4x8lqJ7ZoKbhEyFqkYADd9P6E3citckx9e9ZLcIU,1163
 mlrun/db/auth_utils.py,sha256=hpg8D2r82oN0BWabuWN04BTNZ7jYMAF242YSUpK7LFM,5211
-mlrun/db/base.py,sha256=…
+mlrun/db/base.py,sha256=U77W97LMeVGRmXHH_gafZE95TiFLcI27HJVkAmLTPaQ,30788
 mlrun/db/factory.py,sha256=yP2vVmveUE7LYTCHbS6lQIxP9rW--zdISWuPd_I3d_4,2111
-mlrun/db/httpdb.py,sha256=…
-mlrun/db/nopdb.py,sha256=…
-mlrun/feature_store/__init__.py,sha256=…
-mlrun/feature_store/api.py,sha256=…
+mlrun/db/httpdb.py,sha256=wbfiy7W3Uzfc_1dVbJOYnpaHBwmx9QScg2YUkgSUm7o,231004
+mlrun/db/nopdb.py,sha256=kjvoaWWc4OmQ7AdQKomtRzviJy1_dK3jdMCJNkic34o,27223
+mlrun/feature_store/__init__.py,sha256=8vLyiRgLyfzn5yjd46mjZ__OXm5MiDcZL5nQk63micc,1290
+mlrun/feature_store/api.py,sha256=ZjukeKYxqEmQvT7y1aTogmN4hHujPrjrcWoGWr8rxFg,36880
 mlrun/feature_store/common.py,sha256=Z7USI-d1fo0iwBMsqMBtJflJfyuiV3BLoDXQPSAoBAs,12826
 mlrun/feature_store/feature_set.py,sha256=lakkuKYAvYDJKDTE0xJa5n1nEipMPwpLki-J3CMk0mQ,56221
 mlrun/feature_store/feature_vector.py,sha256=9EJXdnPklwKdkYDKV0hxByIjd59K6R2S-DnP7jZlwoY,44602
@@ -220,8 +220,8 @@ mlrun/model_monitoring/__init__.py,sha256=ELy7njEtZnz09Dc6PGZSFFEGtnwI15bJNWM3Pj
 mlrun/model_monitoring/api.py,sha256=50QqeQaWafJcItE4HzcD82lt2CKw-p1BTCXbc9JDN1k,28550
 mlrun/model_monitoring/controller.py,sha256=j6hqNYKhrw37PJZBcW4BgjsCpG7PtVMvFTpnZO95QVQ,29078
 mlrun/model_monitoring/features_drift_table.py,sha256=c6GpKtpOJbuT1u5uMWDL_S-6N4YPOmlktWMqPme3KFY,25308
-mlrun/model_monitoring/helpers.py,sha256=…
-mlrun/model_monitoring/stream_processing.py,sha256=…
+mlrun/model_monitoring/helpers.py,sha256=Q4vcc7x41lCJdFQIE8UFPY0WIQ8a-4tSGhziMA4ib4w,22003
+mlrun/model_monitoring/stream_processing.py,sha256=4M0H4txMlsC2Q5iKTPp992KWoNPAJjPHj9rqWhXbl8w,33321
 mlrun/model_monitoring/tracking_policy.py,sha256=PBIGrUYWrwcE5gwXupBIVzOb0QRRwPJsgQm_yLGQxB4,5595
 mlrun/model_monitoring/writer.py,sha256=vbL7bqTyNu8q4bNcebX72sUMybVDAoTWg-CXq4fov3Y,8429
 mlrun/model_monitoring/applications/__init__.py,sha256=xDBxkBjl-whHSG_4t1mLkxiypLH-fzn8TmAW9Mjo2uI,759
@@ -241,7 +241,7 @@ mlrun/model_monitoring/db/tsdb/helpers.py,sha256=0oUXc4aUkYtP2SGP6jTb3uPPKImIUsV
 mlrun/model_monitoring/db/tsdb/tdengine/__init__.py,sha256=vgBdsKaXUURKqIf3M0y4sRatmSVA4CQiJs7J5dcVBkQ,620
 mlrun/model_monitoring/db/tsdb/tdengine/schemas.py,sha256=qfKDUZhgteL0mp2A1aP1iMmcthgUMKmZqMUidZjQktQ,12649
 mlrun/model_monitoring/db/tsdb/tdengine/stream_graph_steps.py,sha256=Uadj0UvAmln2MxDWod-kAzau1uNlqZh981rPhbUH_5M,2857
-mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=…
+mlrun/model_monitoring/db/tsdb/tdengine/tdengine_connector.py,sha256=2uSpIUpUGoFRCgrSv0RLGWoR7oc69GWW_bLbmjtfYEk,34860
 mlrun/model_monitoring/db/tsdb/v3io/__init__.py,sha256=aL3bfmQsUQ-sbvKGdNihFj8gLCK3mSys0qDcXtYOwgc,616
 mlrun/model_monitoring/db/tsdb/v3io/stream_graph_steps.py,sha256=_-zo9relCDtjGgievxAcAP9gVN9nDWs8BzGtFwTjb9M,6284
 mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py,sha256=foxYWx7OjOfat2SHmzYrG8bIfaQ5NDnBtpDZua_NVGE,41141
@@ -305,7 +305,7 @@ mlrun/serving/remote.py,sha256=gxJkj_J3j-sZcVUbUzbAmJafP6t6y4NVFsu0kWmYngA,18818
 mlrun/serving/routers.py,sha256=tKsOPegKy6FyTfSBWqMEYGQMSKNeqM-9L__tozE6ftU,52978
 mlrun/serving/server.py,sha256=KiNhW0nTV5STZPzR6kEAUFVzCCAX8qv0g9AoCopARrM,23429
 mlrun/serving/serving_wrapper.py,sha256=R670-S6PX_d5ER6jiHtRvacuPyFzQH0mEf2K0sBIIOM,836
-mlrun/serving/states.py,sha256=…
+mlrun/serving/states.py,sha256=VQaZPmtNPuAnczafXit_UykO-o3zIhb9l6nCz0xhXeQ,72119
 mlrun/serving/utils.py,sha256=k2EIYDWHUGkE-IBI6T0UNT32fw-KySsccIJM_LObI00,4171
 mlrun/serving/v1_serving.py,sha256=c6J_MtpE-Tqu00-6r4eJOCO6rUasHDal9W2eBIcrl50,11853
 mlrun/serving/v2_serving.py,sha256=ZSNVGY3iR3qKmi5-mr_-TIbZMaaJ_EiMm7jVvkzG4Lo,25044
@@ -339,11 +339,11 @@ mlrun/utils/notifications/notification/mail.py,sha256=ZyJ3eqd8simxffQmXzqd3bgbAq
 mlrun/utils/notifications/notification/slack.py,sha256=eQvmctTh6wIG5xVOesLLV9S1-UUCu5UEQ9JIJOor3ts,7183
 mlrun/utils/notifications/notification/webhook.py,sha256=NeyIMSBojjjTJaUHmPbxMByp34GxYkl1-16NqzU27fU,4943
 mlrun/utils/version/__init__.py,sha256=7kkrB7hEZ3cLXoWj1kPoDwo4MaswsI2JVOBpbKgPAgc,614
-mlrun/utils/version/version.json,sha256=…
+mlrun/utils/version/version.json,sha256=CNl3JxbZXs6sJ9peVUrvEii97096bfdKaZCzjyfVOQA,89
 mlrun/utils/version/version.py,sha256=eEW0tqIAkU9Xifxv8Z9_qsYnNhn3YH7NRAfM-pPLt1g,1878
-mlrun-1.8.0rc36.dist-info/LICENSE,sha256=…
-mlrun-1.8.0rc36.dist-info/METADATA,sha256=…
-mlrun-1.8.0rc36.dist-info/WHEEL,sha256=…
-mlrun-1.8.0rc36.dist-info/entry_points.txt,sha256=…
-mlrun-1.8.0rc36.dist-info/top_level.txt,sha256=…
-mlrun-1.8.0rc36.dist-info/RECORD,,
+mlrun-1.8.0rc38.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+mlrun-1.8.0rc38.dist-info/METADATA,sha256=cGXENeZRrZ4-zKjjKVCuyYCYiiOLKeRfzIfvUupv67E,25986
+mlrun-1.8.0rc38.dist-info/WHEEL,sha256=nn6H5-ilmfVryoAQl3ZQ2l8SH5imPWFpm1A5FgEuFV4,91
+mlrun-1.8.0rc38.dist-info/entry_points.txt,sha256=1Owd16eAclD5pfRCoJpYC2ZJSyGNTtUr0nCELMioMmU,46
+mlrun-1.8.0rc38.dist-info/top_level.txt,sha256=NObLzw3maSF9wVrgSeYBv-fgnHkAJ1kEkh12DLdd5KM,6
+mlrun-1.8.0rc38.dist-info/RECORD,,
```

{mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/LICENSE: file without changes
{mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/entry_points.txt: file without changes
{mlrun-1.8.0rc36.dist-info → mlrun-1.8.0rc38.dist-info}/top_level.txt: file without changes