mlrun 1.5.0rc4__py3-none-any.whl → 1.5.0rc6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58) hide show
  1. mlrun/api/api/endpoints/datastore_profile.py +35 -13
  2. mlrun/api/api/endpoints/files.py +1 -1
  3. mlrun/api/api/endpoints/frontend_spec.py +1 -10
  4. mlrun/api/api/endpoints/functions.py +28 -18
  5. mlrun/api/api/endpoints/hub.py +2 -6
  6. mlrun/api/api/endpoints/pipelines.py +5 -1
  7. mlrun/api/api/endpoints/projects.py +1 -0
  8. mlrun/api/api/endpoints/workflows.py +1 -0
  9. mlrun/api/api/utils.py +18 -0
  10. mlrun/api/crud/client_spec.py +3 -0
  11. mlrun/api/crud/datastore_profiles.py +2 -2
  12. mlrun/api/crud/hub.py +158 -142
  13. mlrun/api/crud/model_monitoring/deployment.py +3 -0
  14. mlrun/api/crud/model_monitoring/model_endpoints.py +1 -1
  15. mlrun/api/crud/pipelines.py +10 -4
  16. mlrun/api/crud/workflows.py +11 -4
  17. mlrun/api/db/session.py +7 -2
  18. mlrun/api/db/sqldb/db.py +19 -21
  19. mlrun/api/db/sqldb/models/models_mysql.py +10 -1
  20. mlrun/api/db/sqldb/models/models_sqlite.py +11 -1
  21. mlrun/api/initial_data.py +3 -5
  22. mlrun/api/launcher.py +2 -1
  23. mlrun/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py +46 -0
  24. mlrun/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py +46 -0
  25. mlrun/api/rundb/sqldb.py +113 -61
  26. mlrun/api/utils/db/sqlite_migration.py +1 -0
  27. mlrun/common/model_monitoring/helpers.py +3 -1
  28. mlrun/common/schemas/client_spec.py +1 -0
  29. mlrun/common/schemas/datastore_profile.py +1 -1
  30. mlrun/common/schemas/frontend_spec.py +1 -1
  31. mlrun/config.py +3 -2
  32. mlrun/datastore/datastore_profile.py +33 -21
  33. mlrun/datastore/dbfs_store.py +9 -8
  34. mlrun/datastore/redis.py +6 -0
  35. mlrun/datastore/targets.py +12 -1
  36. mlrun/db/base.py +1 -1
  37. mlrun/db/factory.py +3 -0
  38. mlrun/db/httpdb.py +14 -13
  39. mlrun/db/nopdb.py +1 -1
  40. mlrun/feature_store/api.py +4 -1
  41. mlrun/feature_store/feature_set.py +3 -1
  42. mlrun/feature_store/ingestion.py +1 -0
  43. mlrun/kfpops.py +8 -2
  44. mlrun/launcher/base.py +1 -1
  45. mlrun/model.py +7 -5
  46. mlrun/projects/pipelines.py +7 -6
  47. mlrun/projects/project.py +2 -2
  48. mlrun/run.py +1 -1
  49. mlrun/runtimes/__init__.py +1 -0
  50. mlrun/utils/helpers.py +1 -1
  51. mlrun/utils/notifications/notification/webhook.py +9 -1
  52. mlrun/utils/version/version.json +2 -2
  53. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/METADATA +6 -5
  54. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/RECORD +58 -56
  55. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/LICENSE +0 -0
  56. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/WHEEL +0 -0
  57. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/entry_points.txt +0 -0
  58. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/top_level.txt +0 -0
mlrun/api/crud/hub.py CHANGED
@@ -15,6 +15,9 @@
15
15
  import json
16
16
  from typing import Any, Dict, List, Optional, Tuple
17
17
 
18
+ import sqlalchemy.orm
19
+
20
+ import mlrun.api.utils.singletons.db
18
21
  import mlrun.api.utils.singletons.k8s
19
22
  import mlrun.common.schemas
20
23
  import mlrun.common.schemas.hub
@@ -35,20 +38,6 @@ class Hub(metaclass=mlrun.utils.singleton.Singleton):
35
38
  self._internal_project_name = config.hub.k8s_secrets_project_name
36
39
  self._catalogs = {}
37
40
 
38
- @staticmethod
39
- def _in_k8s():
40
- k8s_helper = mlrun.api.utils.singletons.k8s.get_k8s_helper()
41
- return (
42
- k8s_helper is not None and k8s_helper.is_running_inside_kubernetes_cluster()
43
- )
44
-
45
- @staticmethod
46
- def _generate_credentials_secret_key(source, key=""):
47
- full_key = source + secret_name_separator + key
48
- return Secrets().generate_client_project_secret_key(
49
- SecretsClientType.hub, full_key
50
- )
51
-
52
41
  def add_source(self, source: mlrun.common.schemas.hub.HubSource):
53
42
  source_name = source.metadata.name
54
43
  credentials = source.spec.credentials
@@ -74,118 +63,6 @@ class Hub(metaclass=mlrun.utils.singleton.Singleton):
74
63
  allow_internal_secrets=True,
75
64
  )
76
65
 
77
- def _store_source_credentials(self, source_name, credentials: dict):
78
- if not self._in_k8s():
79
- raise mlrun.errors.MLRunInvalidArgumentError(
80
- "MLRun is not configured with k8s, hub source credentials cannot be stored securely"
81
- )
82
-
83
- adjusted_credentials = {
84
- self._generate_credentials_secret_key(source_name, key): value
85
- for key, value in credentials.items()
86
- }
87
- Secrets().store_project_secrets(
88
- self._internal_project_name,
89
- mlrun.common.schemas.SecretsData(
90
- provider=mlrun.common.schemas.SecretProviderName.kubernetes,
91
- secrets=adjusted_credentials,
92
- ),
93
- allow_internal_secrets=True,
94
- )
95
-
96
- def _get_source_credentials(self, source_name):
97
- if not self._in_k8s():
98
- return {}
99
-
100
- secret_prefix = self._generate_credentials_secret_key(source_name)
101
- secrets = (
102
- Secrets()
103
- .list_project_secrets(
104
- self._internal_project_name,
105
- mlrun.common.schemas.SecretProviderName.kubernetes,
106
- allow_secrets_from_k8s=True,
107
- allow_internal_secrets=True,
108
- )
109
- .secrets
110
- )
111
-
112
- source_secrets = {}
113
- for key, value in secrets.items():
114
- if key.startswith(secret_prefix):
115
- source_secrets[key[len(secret_prefix) :]] = value
116
-
117
- return source_secrets
118
-
119
- @staticmethod
120
- def _get_asset_full_path(
121
- source: mlrun.common.schemas.hub.HubSource,
122
- item: mlrun.common.schemas.hub.HubItem,
123
- asset: str,
124
- ):
125
- """
126
- Combining the item path with the asset path.
127
-
128
- :param source: Hub source object.
129
- :param item: The relevant item to get the asset from.
130
- :param asset: The asset name
131
- :return: Full path to the asset, relative to the item directory.
132
- """
133
- asset_path = item.spec.assets.get(asset, None)
134
- if not asset_path:
135
- raise mlrun.errors.MLRunNotFoundError(
136
- f"Asset={asset} not found. "
137
- f"item={item.metadata.name}, version={item.metadata.version}, tag={item.metadata.tag}"
138
- )
139
- item_path = item.metadata.get_relative_path()
140
- return source.get_full_uri(item_path + asset_path)
141
-
142
- @staticmethod
143
- def _transform_catalog_dict_to_schema(
144
- source: mlrun.common.schemas.hub.HubSource, catalog_dict: Dict[str, Any]
145
- ) -> mlrun.common.schemas.hub.HubCatalog:
146
- """
147
- Transforms catalog dictionary to HubCatalog schema
148
- :param source: Hub source object.
149
- :param catalog_dict: raw catalog dict, top level keys are item names,
150
- second level keys are version tags ("latest, "1.1.0", ...) and
151
- bottom level keys include spec as a dict and all the rest is considered as metadata.
152
- :return: catalog object
153
- """
154
- catalog = mlrun.common.schemas.hub.HubCatalog(
155
- catalog=[], channel=source.spec.channel
156
- )
157
- # Loop over objects, then over object versions.
158
- for object_name, object_dict in catalog_dict.items():
159
- for version_tag, version_dict in object_dict.items():
160
- object_details_dict = version_dict.copy()
161
- spec_dict = object_details_dict.pop("spec", {})
162
- assets = object_details_dict.pop("assets", {})
163
- # We want to align all item names to be normalized.
164
- # This is necessary since the item names are originally collected from the yaml files
165
- # which may can contain underscores.
166
- object_details_dict.update(
167
- {
168
- "name": mlrun.utils.helpers.normalize_name(
169
- object_name, verbose=False
170
- )
171
- }
172
- )
173
- metadata = mlrun.common.schemas.hub.HubItemMetadata(
174
- tag=version_tag, **object_details_dict
175
- )
176
- item_uri = source.get_full_uri(metadata.get_relative_path())
177
- spec = mlrun.common.schemas.hub.HubItemSpec(
178
- item_uri=item_uri, assets=assets, **spec_dict
179
- )
180
- item = mlrun.common.schemas.hub.HubItem(
181
- metadata=metadata,
182
- spec=spec,
183
- status=mlrun.common.schemas.ObjectStatus(),
184
- )
185
- catalog.catalog.append(item)
186
-
187
- return catalog
188
-
189
66
  def get_source_catalog(
190
67
  self,
191
68
  source: mlrun.common.schemas.hub.HubSource,
@@ -265,22 +142,6 @@ class Hub(metaclass=mlrun.utils.singleton.Singleton):
265
142
  )
266
143
  return items[0]
267
144
 
268
- @staticmethod
269
- def _get_catalog_items_filtered_by_name(
270
- catalog: List[mlrun.common.schemas.hub.HubItem],
271
- item_name: str,
272
- ) -> List[mlrun.common.schemas.hub.HubItem]:
273
- """
274
- Retrieve items from catalog filtered by name
275
-
276
- :param catalog: list of items
277
- :param item_name: item name to filter by
278
-
279
- :return: list of item objects from catalog
280
- """
281
- normalized_name = mlrun.utils.helpers.normalize_name(item_name, verbose=False)
282
- return [item for item in catalog if item.metadata.name == normalized_name]
283
-
284
145
  def get_item_object_using_source_credentials(
285
146
  self, source: mlrun.common.schemas.hub.HubSource, url
286
147
  ):
@@ -323,6 +184,19 @@ class Hub(metaclass=mlrun.utils.singleton.Singleton):
323
184
  asset_path,
324
185
  )
325
186
 
187
+ def list_hub_sources(
188
+ self,
189
+ db_session: sqlalchemy.orm.Session,
190
+ item_name: Optional[str] = None,
191
+ tag: Optional[str] = None,
192
+ version: Optional[str] = None,
193
+ ) -> List[mlrun.common.schemas.IndexedHubSource]:
194
+
195
+ hub_sources = mlrun.api.utils.singletons.db.get_db().list_hub_sources(
196
+ db_session
197
+ )
198
+ return self.filter_hub_sources(hub_sources, item_name, tag, version)
199
+
326
200
  def filter_hub_sources(
327
201
  self,
328
202
  sources: List[mlrun.common.schemas.IndexedHubSource],
@@ -360,3 +234,145 @@ class Hub(metaclass=mlrun.utils.singleton.Singleton):
360
234
  filtered_sources.append(source)
361
235
  break
362
236
  return filtered_sources
237
+
238
+ @staticmethod
239
+ def _in_k8s():
240
+ k8s_helper = mlrun.api.utils.singletons.k8s.get_k8s_helper()
241
+ return (
242
+ k8s_helper is not None and k8s_helper.is_running_inside_kubernetes_cluster()
243
+ )
244
+
245
+ @staticmethod
246
+ def _generate_credentials_secret_key(source, key=""):
247
+ full_key = source + secret_name_separator + key
248
+ return Secrets().generate_client_project_secret_key(
249
+ SecretsClientType.hub, full_key
250
+ )
251
+
252
+ def _store_source_credentials(self, source_name, credentials: dict):
253
+ if not self._in_k8s():
254
+ raise mlrun.errors.MLRunInvalidArgumentError(
255
+ "MLRun is not configured with k8s, hub source credentials cannot be stored securely"
256
+ )
257
+
258
+ adjusted_credentials = {
259
+ self._generate_credentials_secret_key(source_name, key): value
260
+ for key, value in credentials.items()
261
+ }
262
+ Secrets().store_project_secrets(
263
+ self._internal_project_name,
264
+ mlrun.common.schemas.SecretsData(
265
+ provider=mlrun.common.schemas.SecretProviderName.kubernetes,
266
+ secrets=adjusted_credentials,
267
+ ),
268
+ allow_internal_secrets=True,
269
+ )
270
+
271
+ def _get_source_credentials(self, source_name):
272
+ if not self._in_k8s():
273
+ return {}
274
+
275
+ secret_prefix = self._generate_credentials_secret_key(source_name)
276
+ secrets = (
277
+ Secrets()
278
+ .list_project_secrets(
279
+ self._internal_project_name,
280
+ mlrun.common.schemas.SecretProviderName.kubernetes,
281
+ allow_secrets_from_k8s=True,
282
+ allow_internal_secrets=True,
283
+ )
284
+ .secrets
285
+ )
286
+
287
+ source_secrets = {}
288
+ for key, value in secrets.items():
289
+ if key.startswith(secret_prefix):
290
+ source_secrets[key[len(secret_prefix) :]] = value
291
+
292
+ return source_secrets
293
+
294
+ @staticmethod
295
+ def _get_asset_full_path(
296
+ source: mlrun.common.schemas.hub.HubSource,
297
+ item: mlrun.common.schemas.hub.HubItem,
298
+ asset: str,
299
+ ):
300
+ """
301
+ Combining the item path with the asset path.
302
+
303
+ :param source: Hub source object.
304
+ :param item: The relevant item to get the asset from.
305
+ :param asset: The asset name
306
+ :return: Full path to the asset, relative to the item directory.
307
+ """
308
+ asset_path = item.spec.assets.get(asset, None)
309
+ if not asset_path:
310
+ raise mlrun.errors.MLRunNotFoundError(
311
+ f"Asset={asset} not found. "
312
+ f"item={item.metadata.name}, version={item.metadata.version}, tag={item.metadata.tag}"
313
+ )
314
+ item_path = item.metadata.get_relative_path()
315
+ return source.get_full_uri(item_path + asset_path)
316
+
317
+ @staticmethod
318
+ def _transform_catalog_dict_to_schema(
319
+ source: mlrun.common.schemas.hub.HubSource, catalog_dict: Dict[str, Any]
320
+ ) -> mlrun.common.schemas.hub.HubCatalog:
321
+ """
322
+ Transforms catalog dictionary to HubCatalog schema
323
+ :param source: Hub source object.
324
+ :param catalog_dict: raw catalog dict, top level keys are item names,
325
+ second level keys are version tags ("latest, "1.1.0", ...) and
326
+ bottom level keys include spec as a dict and all the rest is considered as metadata.
327
+ :return: catalog object
328
+ """
329
+ catalog = mlrun.common.schemas.hub.HubCatalog(
330
+ catalog=[], channel=source.spec.channel
331
+ )
332
+ # Loop over objects, then over object versions.
333
+ for object_name, object_dict in catalog_dict.items():
334
+ for version_tag, version_dict in object_dict.items():
335
+ object_details_dict = version_dict.copy()
336
+ spec_dict = object_details_dict.pop("spec", {})
337
+ assets = object_details_dict.pop("assets", {})
338
+ # We want to align all item names to be normalized.
339
+ # This is necessary since the item names are originally collected from the yaml files
340
+ # which may can contain underscores.
341
+ object_details_dict.update(
342
+ {
343
+ "name": mlrun.utils.helpers.normalize_name(
344
+ object_name, verbose=False
345
+ )
346
+ }
347
+ )
348
+ metadata = mlrun.common.schemas.hub.HubItemMetadata(
349
+ tag=version_tag, **object_details_dict
350
+ )
351
+ item_uri = source.get_full_uri(metadata.get_relative_path())
352
+ spec = mlrun.common.schemas.hub.HubItemSpec(
353
+ item_uri=item_uri, assets=assets, **spec_dict
354
+ )
355
+ item = mlrun.common.schemas.hub.HubItem(
356
+ metadata=metadata,
357
+ spec=spec,
358
+ status=mlrun.common.schemas.ObjectStatus(),
359
+ )
360
+ catalog.catalog.append(item)
361
+
362
+ return catalog
363
+
364
+ @staticmethod
365
+ def _get_catalog_items_filtered_by_name(
366
+ catalog: List[mlrun.common.schemas.hub.HubItem],
367
+ item_name: str,
368
+ ) -> List[mlrun.common.schemas.hub.HubItem]:
369
+ """
370
+ Retrieve items from catalog filtered by name
371
+
372
+ :param catalog: list of items
373
+ :param item_name: item name to filter by
374
+
375
+ :return: list of item objects from catalog
376
+ """
377
+ normalized_name = mlrun.utils.helpers.normalize_name(item_name, verbose=False)
378
+ return [item for item in catalog if item.metadata.name == normalized_name]
@@ -216,6 +216,7 @@ class MonitoringDeployment:
216
216
  fn = self._get_model_monitoring_batch_function(
217
217
  project=project,
218
218
  model_monitoring_access_key=model_monitoring_access_key,
219
+ db_session=db_session,
219
220
  auth_info=auth_info,
220
221
  tracking_policy=tracking_policy,
221
222
  )
@@ -319,6 +320,7 @@ class MonitoringDeployment:
319
320
  self,
320
321
  project: str,
321
322
  model_monitoring_access_key: str,
323
+ db_session: sqlalchemy.orm.Session,
322
324
  auth_info: mlrun.common.schemas.AuthInfo,
323
325
  tracking_policy: mlrun.model_monitoring.tracking_policy.TrackingPolicy,
324
326
  ):
@@ -345,6 +347,7 @@ class MonitoringDeployment:
345
347
  image=tracking_policy.default_batch_image,
346
348
  handler="handler",
347
349
  )
350
+ function.set_db_connection(mlrun.api.api.utils.get_run_db_instance(db_session))
348
351
 
349
352
  # Set the project to the job function
350
353
  function.metadata.project = project
@@ -93,7 +93,7 @@ class ModelEndpoints:
93
93
  logger.info(
94
94
  "Getting model object, inferring column names and collecting feature stats"
95
95
  )
96
- run_db = mlrun.get_run_db()
96
+ run_db = mlrun.api.api.utils.get_run_db_instance(db_session)
97
97
  model_obj: mlrun.artifacts.ModelArtifact = (
98
98
  mlrun.datastore.store_resources.get_store_resource(
99
99
  model_endpoint.spec.model_uri, db=run_db
@@ -95,7 +95,7 @@ class Pipelines(
95
95
  runs = [run.to_dict() for run in response.runs or []]
96
96
  total_size = response.total_size
97
97
  next_page_token = response.next_page_token
98
- runs = self._format_runs(runs, format_)
98
+ runs = self._format_runs(db_session, runs, format_)
99
99
 
100
100
  return total_size, next_page_token, runs
101
101
 
@@ -131,6 +131,7 @@ class Pipelines(
131
131
 
132
132
  def get_pipeline(
133
133
  self,
134
+ db_session: sqlalchemy.orm.Session,
134
135
  run_id: str,
135
136
  project: typing.Optional[str] = None,
136
137
  namespace: typing.Optional[str] = None,
@@ -148,7 +149,9 @@ class Pipelines(
148
149
  raise mlrun.errors.MLRunNotFoundError(
149
150
  f"Pipeline run with id {run_id} is not of project {project}"
150
151
  )
151
- run = self._format_run(run, format_, api_run_detail.to_dict())
152
+ run = self._format_run(
153
+ db_session, run, format_, api_run_detail.to_dict()
154
+ )
152
155
  except kfp_server_api.ApiException as exc:
153
156
  mlrun.errors.raise_for_status_code(int(exc.status), err_to_str(exc))
154
157
  except mlrun.errors.MLRunHTTPStatusError:
@@ -225,16 +228,18 @@ class Pipelines(
225
228
 
226
229
  def _format_runs(
227
230
  self,
231
+ db_session: sqlalchemy.orm.Session,
228
232
  runs: typing.List[dict],
229
233
  format_: mlrun.common.schemas.PipelinesFormat = mlrun.common.schemas.PipelinesFormat.metadata_only,
230
234
  ) -> typing.List[dict]:
231
235
  formatted_runs = []
232
236
  for run in runs:
233
- formatted_runs.append(self._format_run(run, format_))
237
+ formatted_runs.append(self._format_run(db_session, run, format_))
234
238
  return formatted_runs
235
239
 
236
240
  def _format_run(
237
241
  self,
242
+ db_session: sqlalchemy.orm.Session,
238
243
  run: dict,
239
244
  format_: mlrun.common.schemas.PipelinesFormat = mlrun.common.schemas.PipelinesFormat.metadata_only,
240
245
  api_run_detail: typing.Optional[dict] = None,
@@ -252,8 +257,9 @@ class Pipelines(
252
257
  raise mlrun.errors.MLRunRuntimeError(
253
258
  "The full kfp api_run_detail object is needed to generate the summary format"
254
259
  )
260
+ run_db = mlrun.api.api.utils.get_run_db_instance(db_session)
255
261
  return mlrun.kfpops.format_summary_from_kfp_run(
256
- api_run_detail, run["project"]
262
+ api_run_detail, run["project"], run_db=run_db
257
263
  )
258
264
  else:
259
265
  raise NotImplementedError(
@@ -18,14 +18,15 @@ from typing import Dict
18
18
  from sqlalchemy.orm import Session
19
19
 
20
20
  import mlrun.common.schemas
21
- import mlrun.db
22
21
  import mlrun.utils.singleton
23
22
  from mlrun.api.api.utils import (
24
23
  apply_enrichment_and_validation_on_function,
24
+ get_run_db_instance,
25
25
  get_scheduler,
26
26
  )
27
27
  from mlrun.config import config
28
28
  from mlrun.model import Credentials, RunMetadata, RunObject, RunSpec
29
+ from mlrun.utils import fill_project_path_template
29
30
 
30
31
 
31
32
  class WorkflowRunners(
@@ -35,6 +36,7 @@ class WorkflowRunners(
35
36
  def create_runner(
36
37
  run_name: str,
37
38
  project: str,
39
+ db_session: Session,
38
40
  auth_info: mlrun.common.schemas.AuthInfo,
39
41
  image: str,
40
42
  ) -> mlrun.run.KubejobRuntime:
@@ -58,6 +60,8 @@ class WorkflowRunners(
58
60
  image=image,
59
61
  )
60
62
 
63
+ runner.set_db_connection(get_run_db_instance(db_session))
64
+
61
65
  # Enrichment and validation requires access key
62
66
  runner.metadata.credentials.access_key = Credentials.generate_access_key
63
67
 
@@ -158,9 +162,12 @@ class WorkflowRunners(
158
162
  ),
159
163
  handler="mlrun.projects.load_and_run",
160
164
  scrape_metrics=config.scrape_metrics,
161
- output_path=(
162
- workflow_request.artifact_path or config.artifact_path
163
- ).replace("{{run.uid}}", meta_uid),
165
+ output_path=fill_project_path_template(
166
+ (workflow_request.artifact_path or config.artifact_path).replace(
167
+ "{{run.uid}}", meta_uid
168
+ ),
169
+ project.metadata.name,
170
+ ),
164
171
  ),
165
172
  metadata=RunMetadata(
166
173
  uid=meta_uid, name=workflow_spec.name, project=project.metadata.name
mlrun/api/db/session.py CHANGED
@@ -25,10 +25,15 @@ def close_session(db_session):
25
25
  db_session.close()
26
26
 
27
27
 
28
- def run_function_with_new_db_session(func):
28
+ def run_function_with_new_db_session(func, *args, **kwargs):
29
+ """
30
+ Run a function with a new db session, useful for concurrent requests where we can't share a single session.
31
+ However, any changes made by the new session will not be visible to old sessions until the old sessions commit
32
+ due to isolation level.
33
+ """
29
34
  session = create_session()
30
35
  try:
31
- result = func(session)
36
+ result = func(session, *args, **kwargs)
32
37
  return result
33
38
  finally:
34
39
  close_session(session)
mlrun/api/db/sqldb/db.py CHANGED
@@ -110,7 +110,6 @@ def retry_on_conflict(function):
110
110
  try:
111
111
  return function(*args, **kwargs)
112
112
  except Exception as exc:
113
-
114
113
  if mlrun.utils.helpers.are_strings_in_exception_chain_messages(
115
114
  exc, conflict_messages
116
115
  ):
@@ -140,11 +139,8 @@ def retry_on_conflict(function):
140
139
 
141
140
 
142
141
  class SQLDB(DBInterface):
143
- def __init__(self, dsn):
142
+ def __init__(self, dsn=""):
144
143
  self.dsn = dsn
145
- self._cache = {
146
- "project_resources_counters": {"value": None, "ttl": datetime.min}
147
- }
148
144
  self._name_with_iter_regex = re.compile("^[0-9]+-.+$")
149
145
 
150
146
  def initialize(self, session):
@@ -1102,7 +1098,6 @@ class SQLDB(DBInterface):
1102
1098
  if not tag:
1103
1099
  function_tags = self._list_function_tags(session, project, function.id)
1104
1100
  if len(function_tags) == 0:
1105
-
1106
1101
  # function status should be added only to tagged functions
1107
1102
  function_dict["status"] = None
1108
1103
 
@@ -1244,7 +1239,6 @@ class SQLDB(DBInterface):
1244
1239
  labels: Dict = None,
1245
1240
  next_run_time: datetime = None,
1246
1241
  ) -> mlrun.common.schemas.ScheduleRecord:
1247
-
1248
1242
  schedule_record = self._create_schedule_db_record(
1249
1243
  project=project,
1250
1244
  name=name,
@@ -2216,7 +2210,6 @@ class SQLDB(DBInterface):
2216
2210
  partition_order: mlrun.common.schemas.OrderType,
2217
2211
  max_partitions: int = 0,
2218
2212
  ):
2219
-
2220
2213
  partition_field = partition_by.to_partition_by_db_field(cls)
2221
2214
  sort_by_field = partition_sort_by.to_db_field(cls)
2222
2215
 
@@ -2484,7 +2477,6 @@ class SQLDB(DBInterface):
2484
2477
  if uid == existing_feature_set.uid or always_overwrite:
2485
2478
  db_feature_set = existing_feature_set
2486
2479
  else:
2487
-
2488
2480
  # In case an object with the given tag (or 'latest' which is the default) and name, but different uid
2489
2481
  # was found - Check If an object with the same computed uid but different tag already exists
2490
2482
  # and re-tag it.
@@ -2814,7 +2806,6 @@ class SQLDB(DBInterface):
2814
2806
  if uid == existing_feature_vector.uid or always_overwrite:
2815
2807
  db_feature_vector = existing_feature_vector
2816
2808
  else:
2817
-
2818
2809
  # In case an object with the given tag (or 'latest' which is the default) and name, but different uid
2819
2810
  # was found - Check If an object with the same computed uid but different tag already exists
2820
2811
  # and re-tag it.
@@ -3862,7 +3853,6 @@ class SQLDB(DBInterface):
3862
3853
  run_uid: str,
3863
3854
  project: str = "",
3864
3855
  ) -> typing.List[mlrun.model.Notification]:
3865
-
3866
3856
  # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
3867
3857
  run = self._get_run(session, run_uid, project, 0)
3868
3858
  if not run:
@@ -3885,7 +3875,6 @@ class SQLDB(DBInterface):
3885
3875
  ):
3886
3876
  run_id = None
3887
3877
  if run_uid:
3888
-
3889
3878
  # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
3890
3879
  run = self._get_run(session, run_uid, project, 0)
3891
3880
  if not run:
@@ -3947,6 +3936,17 @@ class SQLDB(DBInterface):
3947
3936
  )
3948
3937
  self._commit(session, [run], ignore=True)
3949
3938
 
3939
+ @staticmethod
3940
+ def _transform_datastore_profile_model_to_schema(
3941
+ db_object,
3942
+ ) -> mlrun.common.schemas.DatastoreProfile:
3943
+ return mlrun.common.schemas.DatastoreProfile(
3944
+ name=db_object.name,
3945
+ type=db_object.type,
3946
+ object=db_object.full_object,
3947
+ project=db_object.project,
3948
+ )
3949
+
3950
3950
  def store_datastore_profile(
3951
3951
  self, session, info: mlrun.common.schemas.DatastoreProfile
3952
3952
  ):
@@ -3959,18 +3959,17 @@ class SQLDB(DBInterface):
3959
3959
  info.project = info.project or config.default_project
3960
3960
  profile = self._query(
3961
3961
  session, DatastoreProfile, name=info.name, project=info.project
3962
- )
3963
- first = profile.first()
3964
- if first:
3965
- first.type = info.type
3966
- first.body = info.body
3962
+ ).one_or_none()
3963
+ if profile:
3964
+ profile.type = info.type
3965
+ profile.full_object = info.object
3967
3966
  self._commit(session, [profile])
3968
3967
  else:
3969
3968
  profile = DatastoreProfile(
3970
3969
  name=info.name,
3971
3970
  type=info.type,
3972
3971
  project=info.project,
3973
- body=info.body,
3972
+ full_object=info.object,
3974
3973
  )
3975
3974
  self._upsert(session, [profile])
3976
3975
 
@@ -3990,8 +3989,7 @@ class SQLDB(DBInterface):
3990
3989
  project = project or config.default_project
3991
3990
  res = self._query(session, DatastoreProfile, name=profile, project=project)
3992
3991
  if res.first():
3993
- r = res.first().to_dict(exclude=["id"])
3994
- return mlrun.common.schemas.DatastoreProfile(**r)
3992
+ return self._transform_datastore_profile_model_to_schema(res.first())
3995
3993
  else:
3996
3994
  raise mlrun.errors.MLRunNotFoundError(
3997
3995
  f"Datastore profile '{profile}' not found in project '{project}'"
@@ -4027,7 +4025,7 @@ class SQLDB(DBInterface):
4027
4025
  project = project or config.default_project
4028
4026
  query_results = self._query(session, DatastoreProfile, project=project)
4029
4027
  return [
4030
- mlrun.common.schemas.DatastoreProfile(**query.to_dict(exclude=["id"]))
4028
+ self._transform_datastore_profile_model_to_schema(query)
4031
4029
  for query in query_results
4032
4030
  ]
4033
4031
 
@@ -532,7 +532,16 @@ with warnings.catch_warnings():
532
532
  name = Column(String(255, collation=SQLCollationUtil.collation()))
533
533
  project = Column(String(255, collation=SQLCollationUtil.collation()))
534
534
  type = Column(String(255, collation=SQLCollationUtil.collation()))
535
- body = Column(String(1024, collation=SQLCollationUtil.collation()))
535
+ _full_object = Column("object", JSON)
536
+
537
+ @property
538
+ def full_object(self):
539
+ if self._full_object:
540
+ return json.loads(self._full_object)
541
+
542
+ @full_object.setter
543
+ def full_object(self, value):
544
+ self._full_object = json.dumps(value, default=str)
536
545
 
537
546
 
538
547
  # Must be after all table definitions
@@ -488,7 +488,17 @@ with warnings.catch_warnings():
488
488
  name = Column(String(255, collation=SQLCollationUtil.collation()))
489
489
  type = Column(String(255, collation=SQLCollationUtil.collation()))
490
490
  project = Column(String(255, collation=SQLCollationUtil.collation()))
491
- body = Column(String(1024, collation=SQLCollationUtil.collation()))
491
+
492
+ _full_object = Column("object", JSON)
493
+
494
+ @property
495
+ def full_object(self):
496
+ if self._full_object:
497
+ return json.loads(self._full_object)
498
+
499
+ @full_object.setter
500
+ def full_object(self, value):
501
+ self._full_object = json.dumps(value, default=str)
492
502
 
493
503
 
494
504
  # Must be after all table definitions