mlrun 1.7.0rc22__py3-none-any.whl → 1.7.0rc24__py3-none-any.whl

This diff represents the changes between publicly available package versions as they appear in their respective public registries, and is provided for informational purposes only.

Potentially problematic release: this version of mlrun might be problematic.

Files changed (35)
  1. mlrun/common/helpers.py +11 -0
  2. mlrun/common/schemas/__init__.py +2 -0
  3. mlrun/common/schemas/api_gateway.py +57 -16
  4. mlrun/common/schemas/feature_store.py +78 -28
  5. mlrun/db/base.py +1 -0
  6. mlrun/db/httpdb.py +9 -6
  7. mlrun/db/nopdb.py +1 -0
  8. mlrun/errors.py +1 -3
  9. mlrun/frameworks/__init__.py +0 -6
  10. mlrun/model_monitoring/db/stores/__init__.py +27 -21
  11. mlrun/model_monitoring/db/stores/base/store.py +1 -0
  12. mlrun/model_monitoring/db/stores/sqldb/sql_store.py +8 -8
  13. mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py +8 -8
  14. mlrun/model_monitoring/db/tsdb/__init__.py +1 -1
  15. mlrun/model_monitoring/db/tsdb/base.py +1 -1
  16. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +2 -3
  17. mlrun/model_monitoring/helpers.py +8 -4
  18. mlrun/model_monitoring/stream_processing.py +9 -11
  19. mlrun/model_monitoring/writer.py +10 -6
  20. mlrun/package/__init__.py +1 -13
  21. mlrun/package/packagers/__init__.py +1 -6
  22. mlrun/projects/project.py +5 -1
  23. mlrun/runtimes/nuclio/application/application.py +0 -2
  24. mlrun/runtimes/nuclio/serving.py +9 -6
  25. mlrun/serving/server.py +4 -0
  26. mlrun/serving/v2_serving.py +54 -38
  27. mlrun/utils/notifications/notification/base.py +39 -7
  28. mlrun/utils/notifications/notification/slack.py +1 -14
  29. mlrun/utils/version/version.json +2 -2
  30. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc24.dist-info}/METADATA +1 -1
  31. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc24.dist-info}/RECORD +35 -35
  32. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc24.dist-info}/LICENSE +0 -0
  33. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc24.dist-info}/WHEEL +0 -0
  34. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc24.dist-info}/entry_points.txt +0 -0
  35. {mlrun-1.7.0rc22.dist-info → mlrun-1.7.0rc24.dist-info}/top_level.txt +0 -0
mlrun/common/helpers.py CHANGED
@@ -34,3 +34,14 @@ def parse_versioned_object_uri(
     uri = uri[:loc]
 
     return project, uri, tag, hash_key
+
+
+def generate_api_gateway_name(project: str, name: str) -> str:
+    """
+    Generate a unique (within project) api gateway name
+    :param project: project name
+    :param name: api gateway name
+
+    :return: the resolved api gateway name
+    """
+    return f"{project}-{name}" if project else name
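
For reference, a quick illustration of how the new helper resolves names (the project and gateway names below are invented):

    from mlrun.common.helpers import generate_api_gateway_name

    generate_api_gateway_name("my-project", "my-gateway")  # "my-project-my-gateway"
    generate_api_gateway_name("", "my-gateway")            # no project given, the name is returned as-is: "my-gateway"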
mlrun/common/schemas/__init__.py CHANGED
@@ -82,6 +82,7 @@ from .events import (
 )
 from .feature_store import (
     EntitiesOutput,
+    EntitiesOutputV2,
     Entity,
     EntityListOutput,
     EntityRecord,
@@ -98,6 +99,7 @@ from .feature_store import (
     FeatureSetSpec,
     FeatureSetsTagsOutput,
     FeaturesOutput,
+    FeaturesOutputV2,
     FeatureVector,
     FeatureVectorRecord,
     FeatureVectorsOutput,
mlrun/common/schemas/api_gateway.py CHANGED
@@ -17,8 +17,10 @@ from typing import Optional
 
 import pydantic
 
+import mlrun.common.constants as mlrun_constants
 import mlrun.common.types
 from mlrun.common.constants import MLRUN_FUNCTIONS_ANNOTATION
+from mlrun.common.helpers import generate_api_gateway_name
 
 
 class APIGatewayAuthenticationMode(mlrun.common.types.StrEnum):
@@ -100,7 +102,51 @@ class APIGateway(_APIGatewayBaseModel):
             if upstream.nucliofunction.get("name")
         ]
 
-    def enrich_mlrun_function_names(self):
+    def enrich_mlrun_names(self):
+        self._enrich_api_gateway_mlrun_name()
+        self._enrich_mlrun_function_names()
+        return self
+
+    def replace_nuclio_names_with_mlrun_names(self):
+        self._replace_nuclio_api_gateway_name_with_mlrun_name()
+        self._replace_nuclio_function_names_with_mlrun_names()
+        return self
+
+    def _replace_nuclio_function_names_with_mlrun_names(self):
+        # replace function names from nuclio names to mlrun names
+        # and adds mlrun function URI's to an api gateway annotations
+        # so when we then get api gateway entity from nuclio, we are able to get mlrun function names
+        mlrun_functions = self.metadata.annotations.get(MLRUN_FUNCTIONS_ANNOTATION)
+        if mlrun_functions:
+            mlrun_function_uris = (
+                mlrun_functions.split("&")
+                if "&" in mlrun_functions
+                else [mlrun_functions]
+            )
+            if len(mlrun_function_uris) != len(self.spec.upstreams):
+                raise mlrun.errors.MLRunValueError(
+                    "Error when translating nuclio names to mlrun names in api gateway:"
+                    " number of functions doesn't match the mlrun functions in annotation"
+                )
+            for i in range(len(mlrun_function_uris)):
+                self.spec.upstreams[i].nucliofunction["name"] = mlrun_function_uris[i]
+        return self
+
+    def _replace_nuclio_api_gateway_name_with_mlrun_name(self):
+        # replace api gateway name
+        # in Nuclio, api gateways are named as `<project>-<mlrun-api-gateway-name>`
+        # remove the project prefix from the name if it exists
+        project_name = self.metadata.labels.get(
+            mlrun_constants.MLRunInternalLabels.nuclio_project_name
+        )
+        if project_name and self.spec.name.startswith(f"{project_name}-"):
+            self.spec.name = self.spec.name[len(project_name) + 1 :]
+            self.metadata.name = self.spec.name
+        return self
+
+    def _enrich_mlrun_function_names(self):
+        # enrich mlrun names with nuclio prefixes
+        # and add mlrun function's URIs to Nuclio function annotations
         upstream_with_nuclio_names = []
         mlrun_function_uris = []
         for upstream in self.spec.upstreams:
@@ -126,21 +172,16 @@ class APIGateway(_APIGatewayBaseModel):
         )
         return self
 
-    def replace_nuclio_names_with_mlrun_uri(self):
-        mlrun_functions = self.metadata.annotations.get(MLRUN_FUNCTIONS_ANNOTATION)
-        if mlrun_functions:
-            mlrun_function_uris = (
-                mlrun_functions.split("&")
-                if "&" in mlrun_functions
-                else [mlrun_functions]
-            )
-            if len(mlrun_function_uris) != len(self.spec.upstreams):
-                raise mlrun.errors.MLRunValueError(
-                    "Error when translating nuclio names to mlrun names in api gateway:"
-                    " number of functions doesn't match the mlrun functions in annotation"
-                )
-            for i in range(len(mlrun_function_uris)):
-                self.spec.upstreams[i].nucliofunction["name"] = mlrun_function_uris[i]
+    def _enrich_api_gateway_mlrun_name(self):
+        # replace api gateway name
+        # in Nuclio, api gateways are named as `<project>-<mlrun-api-gateway-name>`
+        # add the project prefix to the name
+        project_name = self.metadata.labels.get(
+            mlrun_constants.MLRunInternalLabels.nuclio_project_name
+        )
+        if project_name:
+            self.spec.name = generate_api_gateway_name(project_name, self.spec.name)
+            self.metadata.name = self.spec.name
         return self
 
 
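To make the naming convention these methods encode concrete, here is a standalone sketch (project and gateway names are invented): on the way to Nuclio the gateway name gains a project prefix, and the prefix is stripped again when the entity is read back.

    from mlrun.common.helpers import generate_api_gateway_name

    project, gateway = "my-project", "my-gateway"
    nuclio_name = generate_api_gateway_name(project, gateway)  # "my-project-my-gateway"
    # reading back, mirroring _replace_nuclio_api_gateway_name_with_mlrun_name
    mlrun_name = nuclio_name[len(project) + 1 :] if nuclio_name.startswith(f"{project}-") else nuclio_name
    assert mlrun_name == gateway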
mlrun/common/schemas/feature_store.py CHANGED
@@ -14,7 +14,7 @@
 #
 from typing import Optional
 
-from pydantic import BaseModel, Extra, Field
+import pydantic
 
 from .auth import AuthorizationResourceTypes, Credentials
 from .object import (
@@ -27,32 +27,62 @@ from .object import (
 )
 
 
-class Feature(BaseModel):
+class FeatureStoreBaseModel(pydantic.BaseModel):
+    """
+    Intermediate base class, in order to override pydantic's configuration, as per
+    https://docs.pydantic.dev/1.10/usage/model_config/#change-behaviour-globally
+    """
+
+    class Config:
+        copy_on_model_validation = "none"
+
+
+class Feature(FeatureStoreBaseModel):
+    name: str
+    value_type: str
+    labels: Optional[dict] = {}
+
+    class Config:
+        extra = pydantic.Extra.allow
+
+
+class QualifiedFeature(FeatureStoreBaseModel):
+    name: str
+    value_type: str
+    feature_set_index: int
+    labels: Optional[dict] = {}
+
+    class Config:
+        extra = pydantic.Extra.allow
+
+
+class Entity(FeatureStoreBaseModel):
     name: str
     value_type: str
     labels: Optional[dict] = {}
 
     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow
 
 
-class Entity(BaseModel):
+class QualifiedEntity(FeatureStoreBaseModel):
     name: str
     value_type: str
+    feature_set_index: int
     labels: Optional[dict] = {}
 
     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow
 
 
 class FeatureSetSpec(ObjectSpec):
     entities: list[Entity] = []
     features: list[Feature] = []
-    engine: Optional[str] = Field(default="storey")
+    engine: Optional[str] = pydantic.Field(default="storey")
 
 
-class FeatureSet(BaseModel):
-    kind: ObjectKind = Field(ObjectKind.feature_set, const=True)
+class FeatureSet(FeatureStoreBaseModel):
+    kind: ObjectKind = pydantic.Field(ObjectKind.feature_set, const=True)
     metadata: ObjectMetadata
     spec: FeatureSetSpec
     status: ObjectStatus
@@ -62,7 +92,7 @@ class FeatureSet(BaseModel):
         return AuthorizationResourceTypes.feature_set
 
 
-class EntityRecord(BaseModel):
+class EntityRecord(FeatureStoreBaseModel):
     name: str
     value_type: str
     labels: list[LabelRecord]
@@ -71,7 +101,7 @@ class EntityRecord(BaseModel):
         orm_mode = True
 
 
-class FeatureRecord(BaseModel):
+class FeatureRecord(FeatureStoreBaseModel):
     name: str
     value_type: str
     labels: list[LabelRecord]
@@ -88,44 +118,64 @@ class FeatureSetRecord(ObjectRecord):
         orm_mode = True
 
 
-class FeatureSetsOutput(BaseModel):
+class FeatureSetsOutput(FeatureStoreBaseModel):
     feature_sets: list[FeatureSet]
 
 
-class FeatureSetsTagsOutput(BaseModel):
+class FeatureSetsTagsOutput(FeatureStoreBaseModel):
     tags: list[str] = []
 
 
-class FeatureSetDigestSpec(BaseModel):
+class FeatureSetDigestSpec(FeatureStoreBaseModel):
     entities: list[Entity]
     features: list[Feature]
 
 
-class FeatureSetDigestOutput(BaseModel):
+class FeatureSetDigestOutput(FeatureStoreBaseModel):
     metadata: ObjectMetadata
     spec: FeatureSetDigestSpec
 
 
-class FeatureListOutput(BaseModel):
+class FeatureSetDigestSpecV2(FeatureStoreBaseModel):
+    entities: list[Entity]
+
+
+class FeatureSetDigestOutputV2(FeatureStoreBaseModel):
+    feature_set_index: int
+    metadata: ObjectMetadata
+    spec: FeatureSetDigestSpecV2
+
+
+class FeatureListOutput(FeatureStoreBaseModel):
     feature: Feature
     feature_set_digest: FeatureSetDigestOutput
 
 
-class FeaturesOutput(BaseModel):
+class FeaturesOutput(FeatureStoreBaseModel):
     features: list[FeatureListOutput]
 
 
-class EntityListOutput(BaseModel):
+class FeaturesOutputV2(FeatureStoreBaseModel):
+    features: list[QualifiedFeature]
+    feature_set_digests: list[FeatureSetDigestOutputV2]
+
+
+class EntityListOutput(FeatureStoreBaseModel):
     entity: Entity
     feature_set_digest: FeatureSetDigestOutput
 
 
-class EntitiesOutput(BaseModel):
+class EntitiesOutputV2(FeatureStoreBaseModel):
+    entities: list[QualifiedEntity]
+    feature_set_digests: list[FeatureSetDigestOutputV2]
+
+
+class EntitiesOutput(FeatureStoreBaseModel):
     entities: list[EntityListOutput]
 
 
-class FeatureVector(BaseModel):
-    kind: ObjectKind = Field(ObjectKind.feature_vector, const=True)
+class FeatureVector(FeatureStoreBaseModel):
+    kind: ObjectKind = pydantic.Field(ObjectKind.feature_vector, const=True)
     metadata: ObjectMetadata
     spec: ObjectSpec
     status: ObjectStatus
@@ -139,39 +189,39 @@ class FeatureVectorRecord(ObjectRecord):
     pass
 
 
-class FeatureVectorsOutput(BaseModel):
+class FeatureVectorsOutput(FeatureStoreBaseModel):
     feature_vectors: list[FeatureVector]
 
 
-class FeatureVectorsTagsOutput(BaseModel):
+class FeatureVectorsTagsOutput(FeatureStoreBaseModel):
     tags: list[str] = []
 
 
-class DataSource(BaseModel):
+class DataSource(FeatureStoreBaseModel):
     kind: str
     name: str
     path: str
 
     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow
 
 
-class DataTarget(BaseModel):
+class DataTarget(FeatureStoreBaseModel):
     kind: str
     name: str
     path: Optional[str]
 
     class Config:
-        extra = Extra.allow
+        extra = pydantic.Extra.allow
 
 
-class FeatureSetIngestInput(BaseModel):
+class FeatureSetIngestInput(FeatureStoreBaseModel):
     source: Optional[DataSource]
     targets: Optional[list[DataTarget]]
     infer_options: Optional[int]
     credentials: Credentials = Credentials()
 
 
-class FeatureSetIngestOutput(BaseModel):
+class FeatureSetIngestOutput(FeatureStoreBaseModel):
     feature_set: FeatureSet
     run_object: dict
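
The V2 response models appear to deduplicate feature-set digests: each digest is listed once and entities/features point back to it via feature_set_index, instead of repeating the full digest per item. A minimal, illustrative construction (field values are invented and the digest list is left empty for brevity):

    from mlrun.common.schemas.feature_store import FeaturesOutputV2, QualifiedFeature

    feature = QualifiedFeature(name="amount", value_type="float", feature_set_index=0, labels={})
    output = FeaturesOutputV2(features=[feature], feature_set_digests=[])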
mlrun/db/base.py CHANGED
@@ -838,6 +838,7 @@ class RunDBInterface(ABC):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
     ) -> None:
         pass
 
mlrun/db/httpdb.py CHANGED
@@ -3320,6 +3320,7 @@ class HTTPRunDB(RunDBInterface):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
     ) -> None:
         """
         Deploy model monitoring application controller, writer and stream functions.
@@ -3329,13 +3330,14 @@
         The stream function goal is to monitor the log of the data stream. It is triggered when a new log entry
         is detected. It processes the new events into statistics that are then written to statistics databases.
 
-        :param project: Project name.
-        :param base_period: The time period in minutes in which the model monitoring controller function
-                            triggers. By default, the base period is 10 minutes.
-        :param image: The image of the model monitoring controller, writer & monitoring
-                      stream functions, which are real time nuclio functions.
-                      By default, the image is mlrun/mlrun.
+        :param project:                          Project name.
+        :param base_period:                      The time period in minutes in which the model monitoring controller
+                                                 function triggers. By default, the base period is 10 minutes.
+        :param image:                            The image of the model monitoring controller, writer & monitoring
+                                                 stream functions, which are real time nuclio functions.
+                                                 By default, the image is mlrun/mlrun.
         :param deploy_histogram_data_drift_app: If true, deploy the default histogram-based data drift application.
+        :param rebuild_images:                   If true, force rebuild of model monitoring infrastructure images.
         """
         self.api_call(
             method=mlrun.common.types.HTTPMethod.POST,
@@ -3344,6 +3346,7 @@
                 "base_period": base_period,
                 "image": image,
                 "deploy_histogram_data_drift_app": deploy_histogram_data_drift_app,
+                "rebuild_images": rebuild_images,
             },
         )
 
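As a usage sketch (the hunk does not show the method name; enable_model_monitoring on the run DB is assumed here), the new flag is simply forwarded to the API call:

    import mlrun

    db = mlrun.get_run_db()
    db.enable_model_monitoring(
        project="my-project",
        base_period=10,
        deploy_histogram_data_drift_app=True,
        rebuild_images=True,  # new flag: force rebuild of the monitoring infrastructure images
    )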
mlrun/db/nopdb.py CHANGED
@@ -675,6 +675,7 @@ class NopDB(RunDBInterface):
         base_period: int = 10,
         image: str = "mlrun/mlrun",
         deploy_histogram_data_drift_app: bool = True,
+        rebuild_images: bool = False,
    ) -> None:
        pass
 
mlrun/errors.py CHANGED
@@ -92,9 +92,7 @@ def raise_for_status(
     try:
         response.raise_for_status()
     except (requests.HTTPError, aiohttp.ClientResponseError) as exc:
-        error_message = err_to_str(exc)
-        if message:
-            error_message = f"{error_message}: {message}"
+        error_message = err_to_str(exc) if not message else message
         status_code = (
             response.status_code
             if hasattr(response, "status_code")
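
The practical effect of this change, sketched with invented strings: a caller-supplied message now replaces the underlying HTTP error text instead of being appended to it.

    exc_text = "404 Client Error: Not Found"  # stands in for err_to_str(exc)
    message = "failed to fetch artifact"      # stands in for the caller-supplied message

    old_error_message = f"{exc_text}: {message}"  # rc22 behavior when message is set
    new_error_message = message                   # rc24 behavior when message is set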
mlrun/frameworks/__init__.py CHANGED
@@ -12,11 +12,5 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-"""
-MLRun provides a quick and easy integration into your code with mlrun.frameworks: a collection of sub-modules
-for the most commonly used machine and deep learning frameworks, providing features such as automatic logging,
-model management, and distributed training.
-"""
-
 # flake8: noqa - this is until we take care of the F401 violations with respect to __all__ & sphinx
 from .parallel_coordinates import compare_db_runs, compare_run_objects
mlrun/model_monitoring/db/stores/__init__.py CHANGED
@@ -31,17 +31,12 @@ class ObjectStoreFactory(enum.Enum):
     def to_object_store(
         self,
         project: str,
-        access_key: str = None,
-        secret_provider: typing.Callable = None,
+        **kwargs,
     ) -> StoreBase:
         """
         Return a StoreBase object based on the provided enum value.
 
         :param project: The name of the project.
-        :param access_key: Access key with permission to the DB table. Note that if access key is None
-                           and the endpoint target is from type KV then the access key will be
-                           retrieved from the environment variable.
-        :param secret_provider: An optional secret provider to get the connection string secret.
 
         :return: `StoreBase` object.
 
@@ -50,10 +45,7 @@ class ObjectStoreFactory(enum.Enum):
         if self == self.v3io_nosql:
             from mlrun.model_monitoring.db.stores.v3io_kv.kv_store import KVStoreBase
 
-            # Get V3IO access key from env
-            access_key = access_key or mlrun.mlconf.get_v3io_access_key()
-
-            return KVStoreBase(project=project, access_key=access_key)
+            return KVStoreBase(project=project)
 
         # Assuming SQL store target if store type is not KV.
         # Update these lines once there are more than two store target types.
@@ -62,7 +54,7 @@ class ObjectStoreFactory(enum.Enum):
 
         return SQLStoreBase(
             project=project,
-            secret_provider=secret_provider,
+            **kwargs,
         )
 
     @classmethod
@@ -79,7 +71,7 @@
 def get_model_endpoint_store(
     project: str,
     access_key: str = None,
-    secret_provider: typing.Callable = None,
+    secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
 ) -> StoreBase:
     # Leaving here for backwards compatibility
     warnings.warn(
@@ -95,24 +87,38 @@ def get_model_endpoint_store(
 
 def get_store_object(
     project: str,
-    access_key: str = None,
-    secret_provider: typing.Callable = None,
+    secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
+    **kwargs,
 ) -> StoreBase:
     """
-    Getting the DB target type based on mlrun.config.model_endpoint_monitoring.store_type.
+    Generate a store object. If a connection string is provided, the store type will be updated according to the
+    connection string. Currently, the supported store types are SQL and v3io-nosql.
 
     :param project: The name of the project.
-    :param access_key: Access key with permission to the DB table.
     :param secret_provider: An optional secret provider to get the connection string secret.
 
-    :return: `StoreBase` object. Using this object, the user can apply different operations on the
-             model monitoring record such as write, update, get and delete a model endpoint.
+    :return: `StoreBase` object. Using this object, the user can apply different operations such as write, update, get
+             and delete a model endpoint record.
    """
 
+    store_connection_string = mlrun.model_monitoring.helpers.get_connection_string(
+        secret_provider=secret_provider
+    )
+
+    if store_connection_string and (
+        store_connection_string.startswith("mysql")
+        or store_connection_string.startswith("sqlite")
+    ):
+        store_type = mlrun.common.schemas.model_monitoring.ModelEndpointTarget.SQL
+        kwargs["store_connection_string"] = store_connection_string
+    else:
+        # Set the default store type if no connection has been set
+        store_type = mlrun.mlconf.model_endpoint_monitoring.store_type
+
     # Get store type value from ObjectStoreFactory enum class
-    store_type = ObjectStoreFactory(mlrun.mlconf.model_endpoint_monitoring.store_type)
+    store_type_fact = ObjectStoreFactory(store_type)
 
     # Convert into store target object
-    return store_type.to_object_store(
-        project=project, access_key=access_key, secret_provider=secret_provider
+    return store_type_fact.to_object_store(
+        project=project, secret_provider=secret_provider, **kwargs
     )
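
An illustrative call of the revised factory (the import path follows this file's location; the connection string is made up, and any callable that maps a secret key to its value works as a secret provider):

    from mlrun.model_monitoring.db.stores import get_store_object

    def secret_provider(key: str) -> str:
        # illustrative: always resolve to a MySQL DSN, which makes the factory pick the SQL store
        return "mysql+pymysql://user:pass@db:3306/mlrun"

    store = get_store_object(project="my-project", secret_provider=secret_provider)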
mlrun/model_monitoring/db/stores/base/store.py CHANGED
@@ -19,6 +19,7 @@ import mlrun.common.schemas.model_monitoring as mm_schemas
 
 
 class StoreBase(ABC):
+    type: typing.ClassVar[str]
     """
     An abstract class to handle the store object in the DB target.
     """
mlrun/model_monitoring/db/stores/sqldb/sql_store.py CHANGED
@@ -25,14 +25,15 @@ from sqlalchemy.sql.elements import BinaryExpression
 
 import mlrun.common.model_monitoring.helpers
 import mlrun.common.schemas.model_monitoring as mm_schemas
-import mlrun.model_monitoring.db
 import mlrun.model_monitoring.db.stores.sqldb.models
 import mlrun.model_monitoring.helpers
 from mlrun.common.db.sql_session import create_session, get_engine
+from mlrun.model_monitoring.db import StoreBase
 from mlrun.utils import datetime_now, logger
 
 
-class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
+class SQLStoreBase(StoreBase):
+    type: typing.ClassVar[str] = mm_schemas.ModelEndpointTarget.SQL
     """
     Handles the DB operations when the DB target is from type SQL. For the SQL operations, we use SQLAlchemy, a Python
     SQL toolkit that handles the communication with the database. When using SQL for storing the model monitoring
@@ -44,23 +45,22 @@ class SQLStoreBase(mlrun.model_monitoring.db.StoreBase):
     def __init__(
         self,
         project: str,
-        secret_provider: typing.Callable = None,
+        **kwargs,
     ):
         """
         Initialize SQL store target object.
 
         :param project: The name of the project.
-        :param secret_provider: An optional secret provider to get the connection string secret.
         """
 
         super().__init__(project=project)
 
-        self._sql_connection_string = (
-            mlrun.model_monitoring.helpers.get_connection_string(
-                secret_provider=secret_provider
+        if "store_connection_string" not in kwargs:
+            raise mlrun.errors.MLRunInvalidArgumentError(
+                "connection_string is a required parameter for SQLStoreBase."
             )
-        )
 
+        self._sql_connection_string = kwargs.get("store_connection_string")
         self._engine = get_engine(dsn=self._sql_connection_string)
 
     def _init_tables(self):
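
A minimal sketch of the new constructor contract (the DSN is illustrative): the connection string is now passed in explicitly as store_connection_string instead of being resolved from a secret provider, and omitting it raises MLRunInvalidArgumentError.

    from mlrun.model_monitoring.db.stores.sqldb.sql_store import SQLStoreBase

    store = SQLStoreBase(
        project="my-project",
        store_connection_string="sqlite:///model-monitoring.db",  # illustrative DSN
    )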
mlrun/model_monitoring/db/stores/v3io_kv/kv_store.py CHANGED
@@ -13,7 +13,6 @@
 # limitations under the License.
 
 import json
-import os
 import typing
 from dataclasses import dataclass
 from http import HTTPStatus
@@ -24,8 +23,8 @@ import v3io.dataplane.response
 
 import mlrun.common.model_monitoring.helpers
 import mlrun.common.schemas.model_monitoring as mm_schemas
-import mlrun.model_monitoring.db
 import mlrun.utils.v3io_clients
+from mlrun.model_monitoring.db import StoreBase
 from mlrun.utils import logger
 
 # Fields to encode before storing in the KV table or to decode after retrieving
@@ -89,18 +88,21 @@ _KIND_TO_SCHEMA_PARAMS: dict[mm_schemas.WriterEventKind, SchemaParams] = {
 _EXCLUDE_SCHEMA_FILTER_EXPRESSION = '__name!=".#schema"'
 
 
-class KVStoreBase(mlrun.model_monitoring.db.StoreBase):
+class KVStoreBase(StoreBase):
+    type: typing.ClassVar[str] = "v3io-nosql"
     """
     Handles the DB operations when the DB target is from type KV. For the KV operations, we use an instance of V3IO
     client and usually the KV table can be found under v3io:///users/pipelines/project-name/model-endpoints/endpoints/.
     """
 
-    def __init__(self, project: str, access_key: typing.Optional[str] = None) -> None:
+    def __init__(
+        self,
+        project: str,
+    ) -> None:
         super().__init__(project=project)
         # Initialize a V3IO client instance
-        self.access_key = access_key or os.environ.get("V3IO_ACCESS_KEY")
         self.client = mlrun.utils.v3io_clients.get_v3io_client(
-            endpoint=mlrun.mlconf.v3io_api, access_key=self.access_key
+            endpoint=mlrun.mlconf.v3io_api,
         )
         # Get the KV table path and container
         self.path, self.container = self._get_path_and_container()
@@ -186,7 +188,6 @@ class KVStoreBase(mlrun.model_monitoring.db.StoreBase):
             table_path=self.path,
             key=endpoint_id,
             raise_for_status=v3io.dataplane.RaiseForStatus.never,
-            access_key=self.access_key,
         )
         endpoint = endpoint.output.item
 
@@ -499,7 +500,6 @@ class KVStoreBase(mlrun.model_monitoring.db.StoreBase):
 
     def _get_frames_client(self):
         return mlrun.utils.v3io_clients.get_frames_client(
-            token=self.access_key,
             address=mlrun.mlconf.v3io_framesd,
             container=self.container,
         )
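
For context, a small standalone sketch of the ClassVar pattern used by the stores above (class names mirror the diff, but the snippet is simplified and illustrative): the abstract base declares type as a class-level attribute, and each concrete store pins it to its target identifier.

    import typing
    from abc import ABC

    class StoreBase(ABC):
        type: typing.ClassVar[str]  # declared on the base, assigned by each concrete store

    class KVStoreBase(StoreBase):
        type: typing.ClassVar[str] = "v3io-nosql"

    print(KVStoreBase.type)  # v3io-nosql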
mlrun/model_monitoring/db/tsdb/__init__.py CHANGED
@@ -65,7 +65,7 @@ class ObjectTSDBFactory(enum.Enum):
 def get_tsdb_connector(
     project: str,
     tsdb_connector_type: str = "",
-    secret_provider: typing.Optional[typing.Callable] = None,
+    secret_provider: typing.Optional[typing.Callable[[str], str]] = None,
     **kwargs,
 ) -> TSDBConnector:
     """
mlrun/model_monitoring/db/tsdb/base.py CHANGED
@@ -25,7 +25,7 @@ from mlrun.utils import logger
 
 
 class TSDBConnector(ABC):
-    type: str = ""
+    type: typing.ClassVar[str]
 
     def __init__(self, project: str):
         """
mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py CHANGED
@@ -418,9 +418,8 @@ class V3IOTSDBConnector(TSDBConnector):
                 f"Available tables: {list(self.tables.keys())}"
             )
 
-        if agg_funcs:
-            # Frames client expects the aggregators to be a comma-separated string
-            aggregators = ",".join(agg_funcs)
+        # Frames client expects the aggregators to be a comma-separated string
+        aggregators = ",".join(agg_funcs) if agg_funcs else None
         table_path = self.tables[table]
         try:
             df = self._frames_client.read(
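
The behavioral nuance of this change, sketched in isolation (the agg_funcs values are illustrative): aggregators is now always bound, falling back to None when no aggregation functions are requested, rather than being left undefined.

    agg_funcs = ["avg", "max"]
    aggregators = ",".join(agg_funcs) if agg_funcs else None  # "avg,max"

    agg_funcs = []
    aggregators = ",".join(agg_funcs) if agg_funcs else None  # None, instead of an unbound name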