mlrun 1.5.0rc5__py3-none-any.whl → 1.5.0rc6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. mlrun/api/api/endpoints/datastore_profile.py +35 -13
  2. mlrun/api/api/endpoints/frontend_spec.py +1 -10
  3. mlrun/api/api/endpoints/functions.py +1 -1
  4. mlrun/api/api/endpoints/hub.py +2 -6
  5. mlrun/api/crud/client_spec.py +3 -0
  6. mlrun/api/crud/datastore_profiles.py +2 -2
  7. mlrun/api/crud/hub.py +158 -142
  8. mlrun/api/crud/workflows.py +7 -3
  9. mlrun/api/db/sqldb/db.py +19 -21
  10. mlrun/api/db/sqldb/models/models_mysql.py +10 -1
  11. mlrun/api/db/sqldb/models/models_sqlite.py +11 -1
  12. mlrun/api/initial_data.py +3 -5
  13. mlrun/api/launcher.py +2 -1
  14. mlrun/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py +46 -0
  15. mlrun/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py +46 -0
  16. mlrun/api/rundb/sqldb.py +15 -9
  17. mlrun/api/utils/db/sqlite_migration.py +1 -0
  18. mlrun/common/model_monitoring/helpers.py +3 -1
  19. mlrun/common/schemas/client_spec.py +1 -0
  20. mlrun/common/schemas/datastore_profile.py +1 -1
  21. mlrun/common/schemas/frontend_spec.py +1 -1
  22. mlrun/config.py +3 -2
  23. mlrun/datastore/datastore_profile.py +33 -21
  24. mlrun/datastore/dbfs_store.py +4 -3
  25. mlrun/datastore/redis.py +6 -0
  26. mlrun/datastore/targets.py +12 -1
  27. mlrun/db/base.py +1 -1
  28. mlrun/db/httpdb.py +10 -9
  29. mlrun/db/nopdb.py +1 -1
  30. mlrun/feature_store/api.py +4 -1
  31. mlrun/feature_store/feature_set.py +3 -1
  32. mlrun/feature_store/ingestion.py +1 -0
  33. mlrun/launcher/base.py +1 -1
  34. mlrun/model.py +7 -5
  35. mlrun/projects/pipelines.py +7 -6
  36. mlrun/projects/project.py +2 -2
  37. mlrun/run.py +1 -1
  38. mlrun/runtimes/__init__.py +1 -0
  39. mlrun/utils/helpers.py +1 -1
  40. mlrun/utils/notifications/notification/webhook.py +9 -1
  41. mlrun/utils/version/version.json +2 -2
  42. {mlrun-1.5.0rc5.dist-info → mlrun-1.5.0rc6.dist-info}/METADATA +6 -5
  43. {mlrun-1.5.0rc5.dist-info → mlrun-1.5.0rc6.dist-info}/RECORD +47 -45
  44. {mlrun-1.5.0rc5.dist-info → mlrun-1.5.0rc6.dist-info}/LICENSE +0 -0
  45. {mlrun-1.5.0rc5.dist-info → mlrun-1.5.0rc6.dist-info}/WHEEL +0 -0
  46. {mlrun-1.5.0rc5.dist-info → mlrun-1.5.0rc6.dist-info}/entry_points.txt +0 -0
  47. {mlrun-1.5.0rc5.dist-info → mlrun-1.5.0rc6.dist-info}/top_level.txt +0 -0
mlrun/api/db/sqldb/db.py CHANGED
@@ -110,7 +110,6 @@ def retry_on_conflict(function):
         try:
             return function(*args, **kwargs)
         except Exception as exc:
-
             if mlrun.utils.helpers.are_strings_in_exception_chain_messages(
                 exc, conflict_messages
             ):
@@ -140,11 +139,8 @@ def retry_on_conflict(function):


 class SQLDB(DBInterface):
-    def __init__(self, dsn):
+    def __init__(self, dsn=""):
         self.dsn = dsn
-        self._cache = {
-            "project_resources_counters": {"value": None, "ttl": datetime.min}
-        }
         self._name_with_iter_regex = re.compile("^[0-9]+-.+$")

     def initialize(self, session):
@@ -1102,7 +1098,6 @@ class SQLDB(DBInterface):
         if not tag:
             function_tags = self._list_function_tags(session, project, function.id)
             if len(function_tags) == 0:
-
                 # function status should be added only to tagged functions
                 function_dict["status"] = None

@@ -1244,7 +1239,6 @@ class SQLDB(DBInterface):
         labels: Dict = None,
         next_run_time: datetime = None,
     ) -> mlrun.common.schemas.ScheduleRecord:
-
         schedule_record = self._create_schedule_db_record(
             project=project,
             name=name,
@@ -2216,7 +2210,6 @@ class SQLDB(DBInterface):
         partition_order: mlrun.common.schemas.OrderType,
         max_partitions: int = 0,
     ):
-
         partition_field = partition_by.to_partition_by_db_field(cls)
         sort_by_field = partition_sort_by.to_db_field(cls)

@@ -2484,7 +2477,6 @@ class SQLDB(DBInterface):
         if uid == existing_feature_set.uid or always_overwrite:
             db_feature_set = existing_feature_set
         else:
-
             # In case an object with the given tag (or 'latest' which is the default) and name, but different uid
             # was found - Check If an object with the same computed uid but different tag already exists
             # and re-tag it.
@@ -2814,7 +2806,6 @@ class SQLDB(DBInterface):
         if uid == existing_feature_vector.uid or always_overwrite:
             db_feature_vector = existing_feature_vector
         else:
-
             # In case an object with the given tag (or 'latest' which is the default) and name, but different uid
             # was found - Check If an object with the same computed uid but different tag already exists
             # and re-tag it.
@@ -3862,7 +3853,6 @@ class SQLDB(DBInterface):
         run_uid: str,
         project: str = "",
     ) -> typing.List[mlrun.model.Notification]:
-
         # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
         run = self._get_run(session, run_uid, project, 0)
         if not run:
@@ -3885,7 +3875,6 @@ class SQLDB(DBInterface):
     ):
         run_id = None
         if run_uid:
-
             # iteration is 0, as we don't support multiple notifications per hyper param run, only for the whole run
             run = self._get_run(session, run_uid, project, 0)
             if not run:
@@ -3947,6 +3936,17 @@
             )
             self._commit(session, [run], ignore=True)

+    @staticmethod
+    def _transform_datastore_profile_model_to_schema(
+        db_object,
+    ) -> mlrun.common.schemas.DatastoreProfile:
+        return mlrun.common.schemas.DatastoreProfile(
+            name=db_object.name,
+            type=db_object.type,
+            object=db_object.full_object,
+            project=db_object.project,
+        )
+
     def store_datastore_profile(
         self, session, info: mlrun.common.schemas.DatastoreProfile
     ):
@@ -3959,18 +3959,17 @@
         info.project = info.project or config.default_project
         profile = self._query(
             session, DatastoreProfile, name=info.name, project=info.project
-        )
-        first = profile.first()
-        if first:
-            first.type = info.type
-            first.body = info.body
+        ).one_or_none()
+        if profile:
+            profile.type = info.type
+            profile.full_object = info.object
             self._commit(session, [profile])
         else:
             profile = DatastoreProfile(
                 name=info.name,
                 type=info.type,
                 project=info.project,
-                body=info.body,
+                full_object=info.object,
             )
             self._upsert(session, [profile])

@@ -3990,8 +3989,7 @@
         project = project or config.default_project
         res = self._query(session, DatastoreProfile, name=profile, project=project)
         if res.first():
-            r = res.first().to_dict(exclude=["id"])
-            return mlrun.common.schemas.DatastoreProfile(**r)
+            return self._transform_datastore_profile_model_to_schema(res.first())
         else:
             raise mlrun.errors.MLRunNotFoundError(
                 f"Datastore profile '{profile}' not found in project '{project}'"
@@ -4027,7 +4025,7 @@
         project = project or config.default_project
         query_results = self._query(session, DatastoreProfile, project=project)
         return [
-            mlrun.common.schemas.DatastoreProfile(**query.to_dict(exclude=["id"]))
+            self._transform_datastore_profile_model_to_schema(query)
             for query in query_results
         ]

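Note: the store_datastore_profile rewrite above swaps Query.first() for Query.one_or_none() and updates the matching row in place when it exists. The following is a standalone sketch of that upsert pattern (not part of the diff) in plain SQLAlchemy, with an invented table and session wiring instead of mlrun's _query/_commit helpers:

import sqlalchemy
from sqlalchemy import JSON, Column, Integer, String, create_engine
from sqlalchemy.orm import Session, declarative_base

Base = declarative_base()


class Profile(Base):
    __tablename__ = "profiles"

    id = Column(Integer, primary_key=True)
    name = Column(String(255))
    project = Column(String(255))
    full_object = Column(JSON)


engine = create_engine("sqlite://")
Base.metadata.create_all(engine)


def store_profile(session: Session, name: str, project: str, obj: dict):
    # one_or_none() returns None when the row is absent (and raises if the
    # name/project pair is unexpectedly duplicated), replacing first()
    profile = (
        session.query(Profile)
        .filter_by(name=name, project=project)
        .one_or_none()
    )
    if profile:
        profile.full_object = obj  # update the existing row in place
    else:
        session.add(Profile(name=name, project=project, full_object=obj))
    session.commit()


with Session(engine) as session:
    store_profile(session, "my-redis", "default", {"type": "redis"})
    store_profile(session, "my-redis", "default", {"type": "redis", "ssl": True})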
mlrun/api/db/sqldb/models/models_mysql.py CHANGED
@@ -532,7 +532,16 @@ with warnings.catch_warnings():
         name = Column(String(255, collation=SQLCollationUtil.collation()))
         project = Column(String(255, collation=SQLCollationUtil.collation()))
         type = Column(String(255, collation=SQLCollationUtil.collation()))
-        body = Column(String(1024, collation=SQLCollationUtil.collation()))
+        _full_object = Column("object", JSON)
+
+        @property
+        def full_object(self):
+            if self._full_object:
+                return json.loads(self._full_object)
+
+        @full_object.setter
+        def full_object(self, value):
+            self._full_object = json.dumps(value, default=str)


     # Must be after all table definitions
mlrun/api/db/sqldb/models/models_sqlite.py CHANGED
@@ -488,7 +488,17 @@ with warnings.catch_warnings():
         name = Column(String(255, collation=SQLCollationUtil.collation()))
         type = Column(String(255, collation=SQLCollationUtil.collation()))
         project = Column(String(255, collation=SQLCollationUtil.collation()))
-        body = Column(String(1024, collation=SQLCollationUtil.collation()))
+
+        _full_object = Column("object", JSON)
+
+        @property
+        def full_object(self):
+            if self._full_object:
+                return json.loads(self._full_object)
+
+        @full_object.setter
+        def full_object(self, value):
+            self._full_object = json.dumps(value, default=str)


     # Must be after all table definitions
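Note: both model files now back the schema's "object" field with a single JSON column wrapped in a property pair. An isolated reproduction of that pattern follows (not part of the diff; table and class names invented), showing the round trip through json.dumps/json.loads:

import json

from sqlalchemy import JSON, Column, Integer
from sqlalchemy.orm import declarative_base

Base = declarative_base()


class ProfileRecord(Base):
    __tablename__ = "profile_records"

    id = Column(Integer, primary_key=True)
    # stored under the column name "object"; the attribute is underscored
    # so the property below mediates all access
    _full_object = Column("object", JSON)

    @property
    def full_object(self):
        if self._full_object:
            return json.loads(self._full_object)

    @full_object.setter
    def full_object(self, value):
        # default=str keeps values like datetimes from breaking serialization
        self._full_object = json.dumps(value, default=str)


record = ProfileRecord()
record.full_object = {"type": "redis", "endpoint_url": "redis://localhost"}
print(record.full_object)  # the dict, restored from its JSON string form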
mlrun/api/initial_data.py CHANGED
@@ -192,7 +192,7 @@ def _perform_schema_migrations(alembic_util: mlrun.api.utils.db.alembic.AlembicU

 def _is_latest_data_version():
     db_session = create_session()
-    db = mlrun.api.db.sqldb.db.SQLDB("")
+    db = mlrun.api.db.sqldb.db.SQLDB()

     try:
         current_data_version = _resolve_current_data_version(db, db_session)
@@ -214,8 +214,7 @@ def _perform_database_migration(

 def _perform_data_migrations(db_session: sqlalchemy.orm.Session):
     if config.httpdb.db.data_migrations_mode == "enabled":
-        # FileDB is not really a thing anymore, so using SQLDB directly
-        db = mlrun.api.db.sqldb.db.SQLDB("")
+        db = mlrun.api.db.sqldb.db.SQLDB()
         current_data_version = int(db.get_current_data_version(db_session))
         if current_data_version != latest_data_version:
             logger.info(
@@ -235,8 +234,7 @@ def _perform_data_migrations(db_session: sqlalchemy.orm.Session):


 def _add_initial_data(db_session: sqlalchemy.orm.Session):
-    # FileDB is not really a thing anymore, so using SQLDB directly
-    db = mlrun.api.db.sqldb.db.SQLDB("")
+    db = mlrun.api.db.sqldb.db.SQLDB()
     _add_default_hub_source_if_needed(db, db_session)
     _add_data_version(db, db_session)

mlrun/api/launcher.py CHANGED
@@ -203,8 +203,9 @@ class ServerSideLauncher(launcher.BaseLauncher):
         # in normal use cases if no project is found we will get an error
         if project:
             project = mlrun.projects.project.MlrunProject.from_dict(project.dict())
+        # there is no need to auto mount here as it was already done in the full spec enrichment with the auth info
         mlrun.projects.pipelines.enrich_function_object(
-            project, runtime, copy_function=False
+            project, runtime, copy_function=False, try_auto_mount=False
         )

     def _enrich_full_spec(
mlrun/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py ADDED
@@ -0,0 +1,46 @@
+# Copyright 2023 Iguazio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""altering table datastore_profiles 2
+
+Revision ID: 026c947c4487
+Revises: b1d1e7ab5dec
+Create Date: 2023-08-10 14:15:30.523729
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "026c947c4487"
+down_revision = "b1d1e7ab5dec"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column("datastore_profiles", sa.Column("object", sa.JSON(), nullable=True))
+    op.drop_column("datastore_profiles", "body")
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    op.add_column(
+        "datastore_profiles", sa.Column("body", sa.String(length=1024), nullable=True)
+    )
+    op.drop_column("datastore_profiles", "object")
+    # ### end Alembic commands ###
mlrun/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py ADDED
@@ -0,0 +1,46 @@
+# Copyright 2023 Iguazio
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+"""altering table datastore_profiles 2
+
+Revision ID: 026c947c4487
+Revises: 6e0c9531edc7
+Create Date: 2023-08-10 14:15:30.523729
+
+"""
+import sqlalchemy as sa
+from alembic import op
+
+# revision identifiers, used by Alembic.
+revision = "026c947c4487"
+down_revision = "6e0c9531edc7"
+branch_labels = None
+depends_on = None
+
+
+def upgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datastore_profiles") as batch_op:
+        batch_op.add_column(sa.Column("object", sa.JSON(), nullable=True))
+        batch_op.drop_column("body")
+    # ### end Alembic commands ###
+
+
+def downgrade():
+    # ### commands auto generated by Alembic - please adjust! ###
+    with op.batch_alter_table("datastore_profiles") as batch_op:
+        batch_op.add_column(sa.Column("body", sa.String(length=1024), nullable=True))
+        batch_op.drop_column("object")
+    # ### end Alembic commands ###
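Note that the two migrations apply the same column swap differently: MySQL supports in-place ALTER TABLE, so the first revision calls op.add_column/op.drop_column directly, while SQLite's limited ALTER support means the second revision goes through Alembic's batch_alter_table, which recreates the table with the new schema and copies the rows across.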
mlrun/api/rundb/sqldb.py CHANGED
@@ -717,6 +717,20 @@ class SQLRunDB(RunDBInterface):
             function,
         )

+    def list_hub_sources(
+        self,
+        item_name: Optional[str] = None,
+        tag: Optional[str] = None,
+        version: Optional[str] = None,
+    ):
+        return self._transform_db_error(
+            mlrun.api.crud.Hub().list_hub_sources,
+            self.session,
+            item_name,
+            tag,
+            version,
+        )
+
     def list_pipelines(
         self,
         project: str,
@@ -842,14 +856,6 @@ class SQLRunDB(RunDBInterface):
     ):
         raise NotImplementedError()

-    def list_hub_sources(
-        self,
-        item_name: Optional[str] = None,
-        tag: Optional[str] = None,
-        version: Optional[str] = None,
-    ):
-        raise NotImplementedError()
-
     def get_hub_source(self, source_name: str):
         raise NotImplementedError()

@@ -894,7 +900,7 @@ class SQLRunDB(RunDBInterface):
     def delete_datastore_profile(self, name: str, project: str):
         raise NotImplementedError()

-    def list_datastore_profile(
+    def list_datastore_profiles(
         self, project: str
     ) -> List[mlrun.common.schemas.DatastoreProfile]:
         raise NotImplementedError()
mlrun/api/utils/db/sqlite_migration.py CHANGED
@@ -57,6 +57,7 @@ class SQLiteMigrationUtil(object):
         "projects_labels",
         "data_versions",
         "background_tasks",
+        "datastore_profiles",
     ]

     def __init__(self):
mlrun/common/model_monitoring/helpers.py CHANGED
@@ -65,5 +65,7 @@ def parse_monitoring_stream_path(stream_uri: str, project: str):

     elif stream_uri.startswith("v3io://") and mlrun.mlconf.is_ce_mode():
         # V3IO is not supported in CE mode, generating a default http stream path
-        stream_uri = mlrun.mlconf.model_endpoint_monitoring.default_http_sink
+        stream_uri = mlrun.mlconf.model_endpoint_monitoring.default_http_sink.format(
+            project=project
+        )
     return stream_uri
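Note: the fix above matters because default_http_sink is a template string containing a "{project}" placeholder, so returning it verbatim left the placeholder unexpanded. A standalone illustration (not mlrun code; the sink URL is invented and the CE-mode check is omitted):

# invented template; the real value comes from mlrun.mlconf
DEFAULT_HTTP_SINK = "http://monitoring-stream.{project}.example.local:8080"


def parse_monitoring_stream_path(stream_uri: str, project: str) -> str:
    if stream_uri.startswith("v3io://"):
        # fill in the project instead of returning the raw template
        stream_uri = DEFAULT_HTTP_SINK.format(project=project)
    return stream_uri


print(parse_monitoring_stream_path("v3io://projects/demo/stream", "demo"))
# -> http://monitoring-stream.demo.example.local:8080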
mlrun/common/schemas/client_spec.py CHANGED
@@ -57,6 +57,7 @@ class ClientSpec(pydantic.BaseModel):
     redis_type: typing.Optional[str]
     sql_url: typing.Optional[str]
     model_endpoint_monitoring_store_type: typing.Optional[str]
+    model_endpoint_monitoring_endpoint_store_connection: typing.Optional[str]
     # ce_mode is deprecated, we will use the full ce config instead and ce_mode will be removed in 1.6.0
     ce_mode: typing.Optional[str]
     ce: typing.Optional[dict]
mlrun/common/schemas/datastore_profile.py CHANGED
@@ -19,5 +19,5 @@ from pydantic import BaseModel
 class DatastoreProfile(BaseModel):
     name: str
     type: str
-    body: str
+    object: str
     project: str
mlrun/common/schemas/frontend_spec.py CHANGED
@@ -60,7 +60,7 @@ class FrontendSpec(pydantic.BaseModel):
     function_deployment_target_image_template: typing.Optional[str]
     function_deployment_target_image_name_prefix_template: str
     function_deployment_target_image_registries_to_enforce_prefix: typing.List[str] = []
-    function_deployment_mlrun_command: typing.Optional[str]
+    function_deployment_mlrun_requirement: typing.Optional[str]
     auto_mount_type: typing.Optional[str]
     auto_mount_params: typing.Dict[str, str] = {}
     default_artifact_path: str
mlrun/config.py CHANGED
@@ -453,7 +453,8 @@ default_config = {
     "data_prefixes": {
         "default": "v3io:///projects/{project}/FeatureStore/{name}/{kind}",
         "nosql": "v3io:///projects/{project}/FeatureStore/{name}/{kind}",
-        "redisnosql": "redis:///projects/{project}/FeatureStore/{name}/{kind}",
+        # "authority" is optional and generalizes [userinfo "@"] host [":" port]
+        "redisnosql": "redis://{authority}/projects/{project}/FeatureStore/{name}/{kind}",
     },
     "default_targets": "parquet,nosql",
     "default_job_image": "mlrun/mlrun",
@@ -1010,7 +1011,7 @@ class Config:
         if artifact_path[-1] != "/":
             artifact_path += "/"

-        return mlrun.utils.helpers.fill_artifact_path_template(
+        return mlrun.utils.helpers.fill_project_path_template(
             artifact_path=artifact_path + file_path, project=project
         )

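Note: a quick sketch of what the new "{authority}" placeholder buys (standalone illustration, values invented). An authority is "[userinfo@]host[:port]", so one template now covers both the old anonymous form and fully qualified Redis endpoints:

template = "redis://{authority}/projects/{project}/FeatureStore/{name}/{kind}"

# an empty authority reproduces the previous hard-coded form
print(template.format(authority="", project="demo", name="fs", kind="nosql"))
# redis:///projects/demo/FeatureStore/fs/nosql

# userinfo, host and port can now be carried in the prefix
print(template.format(authority="user:pass@redis.example:6379",
                      project="demo", name="fs", kind="nosql"))
# redis://user:pass@redis.example:6379/projects/demo/FeatureStore/fs/nosql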
mlrun/datastore/datastore_profile.py CHANGED
@@ -26,18 +26,10 @@ import mlrun.errors
 from ..secrets import get_secret_or_env


-class PrivateValue(pydantic.BaseModel):
-    value: str
-
-    def get(self):
-        if self.value == "None":
-            return None
-        return ast.literal_eval(self.value)["value"]
-
-
 class DatastoreProfile(pydantic.BaseModel):
     type: str
     name: str
+    _private_attributes: typing.List = ()

     @pydantic.validator("name")
     def lower_case(cls, v):
@@ -58,21 +50,19 @@ class DatastoreProfile(pydantic.BaseModel):

 class DatastoreProfileRedis(DatastoreProfile):
     type: str = pydantic.Field("redis")
+    _private_attributes = ("username", "password")
     endpoint_url: str
-    username: typing.Optional[PrivateValue]
-    password: typing.Optional[PrivateValue]
-
-    @pydantic.validator("username", "password", pre=True)
-    def convert_to_private(cls, v):
-        return PrivateValue(value=v)
+    username: typing.Optional[str] = None
+    password: typing.Optional[str] = None

     def is_secured(self):
         return self.endpoint_url.startswith("rediss://")

     def url_with_credentials(self):
         parsed_url = urlparse(self.endpoint_url)
-        username = self.username.get() if self.username else None
-        password = self.password.get() if self.password else None
+        username = self.username
+        password = self.password
+        netloc = parsed_url.hostname
         if username:
             if password:
                 netloc = f"{username}:{password}@{parsed_url.hostname}"
@@ -106,13 +96,21 @@ class DatastoreProfile2Json(pydantic.BaseModel):
     @staticmethod
     def get_json_public(profile: DatastoreProfile) -> str:
         return DatastoreProfile2Json._to_json(
-            {k: v for k, v in profile.dict().items() if not isinstance(v, dict)}
+            {
+                k: v
+                for k, v in profile.dict().items()
+                if not str(k) in profile._private_attributes
+            }
         )

     @staticmethod
     def get_json_private(profile: DatastoreProfile) -> str:
         return DatastoreProfile2Json._to_json(
-            {k: v for k, v in profile.dict().items() if isinstance(v, dict)}
+            {
+                k: v
+                for k, v in profile.dict().items()
+                if str(k) in profile._private_attributes
+            }
         )

     @staticmethod
@@ -123,9 +121,18 @@ class DatastoreProfile2Json(pydantic.BaseModel):
         decoded_dict = {
             k: base64.b64decode(str(v).encode()).decode() for k, v in attributes.items()
         }
+
+        def safe_literal_eval(value):
+            try:
+                return ast.literal_eval(value)
+            except (ValueError, SyntaxError):
+                return value
+
+        decoded_dict = {k: safe_literal_eval(v) for k, v in decoded_dict.items()}
         datastore_type = decoded_dict.get("type")
-        if datastore_type == "redis":
-            return DatastoreProfileRedis.parse_obj(decoded_dict)
+        ds_profile_factory = {"redis": DatastoreProfileRedis}
+        if datastore_type in ds_profile_factory:
+            return ds_profile_factory[datastore_type].parse_obj(decoded_dict)
         else:
             if datastore_type:
                 reason = f"unexpected type '{decoded_dict['type']}'"
@@ -148,6 +155,11 @@ def datastore_profile_read(url):
     public_profile = mlrun.db.get_run_db().get_datastore_profile(
         profile_name, project_name
     )
+    if not public_profile:
+        raise mlrun.errors.MLRunInvalidArgumentError(
+            f"Failed to fetch datastore profile '{url}' "
+        )
+
     project_ds_name_private = DatastoreProfile.generate_secret_key(
         profile_name, project_name
     )
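Note: with PrivateValue gone, the public/private split is driven entirely by _private_attributes. A hedged usage sketch of the serializers shown above (requires mlrun installed; the profile name, endpoint and credentials are invented):

from mlrun.datastore.datastore_profile import (
    DatastoreProfile2Json,
    DatastoreProfileRedis,
)

profile = DatastoreProfileRedis(
    name="cache",
    endpoint_url="rediss://redis.example.com:6380",
    username="app",
    password="secret",
)

# the public form omits username/password (listed in _private_attributes);
# the private form carries only them, to be stored as project secrets
print(DatastoreProfile2Json.get_json_public(profile))
print(DatastoreProfile2Json.get_json_private(profile))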
mlrun/datastore/dbfs_store.py CHANGED
@@ -82,9 +82,10 @@ class DatabricksFileSystemDisableCache(DatabricksFileSystem):
 # dbfs objects will be represented with the following URL: dbfs://<path>
 class DBFSStore(DataStore):
     def __init__(self, parent, schema, name, endpoint="", secrets: dict = None):
-        if not endpoint:
-            endpoint = mlrun.get_secret_or_env("DATABRICKS_HOST")
         super().__init__(parent, name, schema, endpoint, secrets=secrets)
+        if not endpoint:
+            endpoint = self._get_secret_or_env("DATABRICKS_HOST")
+            self.endpoint = endpoint
         self.get_filesystem(silent=False)

     def get_filesystem(self, silent=True):
@@ -96,7 +97,7 @@ class DBFSStore(DataStore):
     def get_storage_options(self):
         return dict(
             token=self._get_secret_or_env("DATABRICKS_TOKEN"),
-            instance=mlrun.get_secret_or_env("DATABRICKS_HOST"),
+            instance=self._get_secret_or_env("DATABRICKS_HOST"),
        )

     def _verify_filesystem_and_key(self, key: str):
mlrun/datastore/redis.py CHANGED
@@ -36,6 +36,12 @@ class RedisStore(DataStore):
         self.endpoint = self.endpoint or mlrun.mlconf.redis.url
         if schema == "ds":
             datastore_profile = datastore_profile_read(name)
+            if not datastore_profile:
+                raise ValueError(f"Failed to load datastore profile '{name}'")
+            if datastore_profile.type != "redis":
+                raise ValueError(
+                    f"Trying to use profile of type '{datastore_profile.type}' as redis datastore"
+                )
             self._redis_url = datastore_profile.url_with_credentials()
             self.secure = datastore_profile.is_secured()
         else:
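Note: the new guards assume the resolved profile exposes the type/is_secured/url_with_credentials API from datastore_profile.py above. A sketch of what the happy path relies on (invented endpoint and credentials; requires mlrun installed):

from mlrun.datastore.datastore_profile import DatastoreProfileRedis

profile = DatastoreProfileRedis(
    name="cache",
    endpoint_url="rediss://redis.example.com",
    username="app",
    password="secret",
)

assert profile.type == "redis"         # what the added type check verifies
assert profile.is_secured()            # True for the rediss:// scheme
print(profile.url_with_credentials())  # credentials injected into the netloc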
mlrun/datastore/targets.py CHANGED
@@ -600,7 +600,12 @@ class BaseStoreTarget(DataTargetBase):

     def get_target_path(self):
         path_object = self._target_path_object
-        return path_object.get_absolute_path() if path_object else None
+        project_name = self._resource.metadata.project if self._resource else None
+        return (
+            path_object.get_absolute_path(project_name=project_name)
+            if path_object
+            else None
+        )

     def get_target_path_with_credentials(self):
         return self.get_target_path()
@@ -1174,6 +1179,12 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
         endpoint = endpoint or mlrun.mlconf.redis.url
         if endpoint.startswith("ds"):
             datastore_profile = datastore_profile_read(endpoint)
+            if not datastore_profile:
+                raise ValueError(f"Failed to load datastore profile '{endpoint}'")
+            if datastore_profile.type != "redis":
+                raise ValueError(
+                    f"Trying to use profile of type '{datastore_profile.type}' as redis datastore"
+                )
             endpoint = datastore_profile.url_with_credentials()
         else:
             parsed_endpoint = urlparse(endpoint)
mlrun/db/base.py CHANGED
@@ -649,7 +649,7 @@ class RunDBInterface(ABC):
     ) -> mlrun.common.schemas.DatastoreProfile:
         pass

-    def list_datastore_profile(
+    def list_datastore_profiles(
         self, project: str
     ) -> List[mlrun.common.schemas.DatastoreProfile]:
         pass
mlrun/db/httpdb.py CHANGED
@@ -13,7 +13,6 @@
 # limitations under the License.
 import enum
 import http
-import json
 import re
 import tempfile
 import time
@@ -440,6 +439,10 @@ class HTTPRunDB(RunDBInterface):
             server_cfg.get("model_endpoint_monitoring_store_type")
             or config.model_endpoint_monitoring.store_type
         )
+        config.model_endpoint_monitoring.endpoint_store_connection = (
+            server_cfg.get("model_endpoint_monitoring_endpoint_store_connection")
+            or config.model_endpoint_monitoring.endpoint_store_connection
+        )
         config.packagers = server_cfg.get("packagers") or config.packagers
         server_data_prefixes = server_cfg.get("feature_store_data_prefixes") or {}
         for prefix in ["default", "nosql", "redisnosql"]:
@@ -1673,7 +1676,6 @@ class HTTPRunDB(RunDBInterface):
         order,
         max_partitions=None,
     ):
-
         partition_params = {
             "partition-by": partition_by,
             "rows-per-partition": rows_per_partition,
@@ -2186,7 +2188,6 @@ class HTTPRunDB(RunDBInterface):
         error_message = f"Failed listing projects, query: {params}"
         response = self.api_call("GET", "projects", error_message, params=params)
         if format_ == mlrun.common.schemas.ProjectsFormat.name_only:
-
             # projects is just a list of strings
             return response.json()["projects"]

@@ -3299,11 +3300,11 @@ class HTTPRunDB(RunDBInterface):
         self, name: str, project: str
     ) -> Optional[mlrun.common.schemas.DatastoreProfile]:
         project = project or config.default_project
-        path = self._path_of("projects", project, "datastore_profiles") + f"/{name}"
+        path = self._path_of("projects", project, "datastore-profiles") + f"/{name}"

         res = self.api_call(method="GET", path=path)
-        if res and res._content:
-            public_wrapper = json.loads(res._content)
+        if res:
+            public_wrapper = res.json()
             datastore = DatastoreProfile2Json.create_from_json(
                 public_json=public_wrapper["body"]
             )
@@ -3313,7 +3314,7 @@ class HTTPRunDB(RunDBInterface):
     def delete_datastore_profile(self, name: str, project: str):
         pass

-    def list_datastore_profile(
+    def list_datastore_profiles(
         self, project: str
     ) -> List[mlrun.common.schemas.DatastoreProfile]:
         pass
@@ -3326,9 +3327,9 @@ class HTTPRunDB(RunDBInterface):
         :returns: None
         """
         project = project or config.default_project
-        path = self._path_of("projects", project, "datastore_profiles")
+        path = self._path_of("projects", project, "datastore-profiles")

-        self.api_call(method="PUT", path=path, body=json.dumps(profile.dict()))
+        self.api_call(method="PUT", path=path, json=profile.dict())


 def _as_json(obj):
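Note: the body=json.dumps(...) to json=profile.dict() change delegates serialization to the HTTP layer, which also sets the Content-Type header. A standalone sketch of the difference using requests directly (not the real api_call wiring; the endpoint URL is illustrative):

import requests

profile = {
    "name": "my-redis",
    "type": "redis",
    "object": "<public profile json>",
    "project": "default",
}

# before: body=json.dumps(profile) sent a pre-serialized string with no
# explicit content type; now the client passes the dict and lets the HTTP
# layer serialize it and set Content-Type: application/json
response = requests.put(
    "http://mlrun-api.example.local/api/v1/projects/default/datastore-profiles",
    json=profile,
)
response.raise_for_status()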