mlrun 1.10.0rc2__py3-none-any.whl → 1.10.0rc4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of mlrun might be problematic.

Files changed (67)
  1. mlrun/__init__.py +2 -2
  2. mlrun/__main__.py +2 -2
  3. mlrun/artifacts/__init__.py +1 -0
  4. mlrun/artifacts/base.py +20 -8
  5. mlrun/artifacts/dataset.py +1 -1
  6. mlrun/artifacts/document.py +1 -1
  7. mlrun/artifacts/helpers.py +40 -0
  8. mlrun/artifacts/llm_prompt.py +165 -0
  9. mlrun/artifacts/manager.py +13 -1
  10. mlrun/artifacts/model.py +92 -12
  11. mlrun/artifacts/plots.py +2 -2
  12. mlrun/common/formatters/artifact.py +1 -0
  13. mlrun/common/runtimes/constants.py +0 -21
  14. mlrun/common/schemas/artifact.py +12 -12
  15. mlrun/common/schemas/pipeline.py +0 -16
  16. mlrun/common/schemas/project.py +0 -17
  17. mlrun/common/schemas/runs.py +0 -17
  18. mlrun/config.py +3 -3
  19. mlrun/datastore/base.py +2 -2
  20. mlrun/datastore/datastore.py +1 -1
  21. mlrun/datastore/datastore_profile.py +3 -11
  22. mlrun/datastore/redis.py +2 -3
  23. mlrun/datastore/sources.py +0 -9
  24. mlrun/datastore/store_resources.py +3 -3
  25. mlrun/datastore/storeytargets.py +2 -5
  26. mlrun/datastore/targets.py +7 -57
  27. mlrun/datastore/utils.py +1 -11
  28. mlrun/db/base.py +7 -6
  29. mlrun/db/httpdb.py +72 -66
  30. mlrun/db/nopdb.py +1 -0
  31. mlrun/errors.py +22 -1
  32. mlrun/execution.py +87 -1
  33. mlrun/feature_store/common.py +5 -5
  34. mlrun/feature_store/feature_set.py +10 -6
  35. mlrun/feature_store/feature_vector.py +8 -6
  36. mlrun/launcher/base.py +1 -1
  37. mlrun/lists.py +1 -1
  38. mlrun/model.py +0 -5
  39. mlrun/model_monitoring/__init__.py +0 -1
  40. mlrun/model_monitoring/api.py +0 -44
  41. mlrun/model_monitoring/applications/evidently/base.py +3 -41
  42. mlrun/model_monitoring/controller.py +1 -1
  43. mlrun/model_monitoring/writer.py +1 -4
  44. mlrun/projects/operations.py +3 -3
  45. mlrun/projects/project.py +260 -23
  46. mlrun/run.py +9 -27
  47. mlrun/runtimes/base.py +6 -6
  48. mlrun/runtimes/kubejob.py +2 -2
  49. mlrun/runtimes/nuclio/function.py +3 -3
  50. mlrun/runtimes/nuclio/serving.py +13 -23
  51. mlrun/runtimes/remotesparkjob.py +6 -0
  52. mlrun/runtimes/sparkjob/spark3job.py +6 -0
  53. mlrun/serving/__init__.py +5 -1
  54. mlrun/serving/server.py +39 -3
  55. mlrun/serving/states.py +101 -4
  56. mlrun/serving/v2_serving.py +1 -1
  57. mlrun/utils/helpers.py +66 -9
  58. mlrun/utils/notifications/notification/slack.py +5 -1
  59. mlrun/utils/notifications/notification_pusher.py +2 -1
  60. mlrun/utils/version/version.json +2 -2
  61. {mlrun-1.10.0rc2.dist-info → mlrun-1.10.0rc4.dist-info}/METADATA +22 -10
  62. {mlrun-1.10.0rc2.dist-info → mlrun-1.10.0rc4.dist-info}/RECORD +66 -65
  63. {mlrun-1.10.0rc2.dist-info → mlrun-1.10.0rc4.dist-info}/WHEEL +1 -1
  64. mlrun/model_monitoring/tracking_policy.py +0 -124
  65. {mlrun-1.10.0rc2.dist-info → mlrun-1.10.0rc4.dist-info}/entry_points.txt +0 -0
  66. {mlrun-1.10.0rc2.dist-info → mlrun-1.10.0rc4.dist-info}/licenses/LICENSE +0 -0
  67. {mlrun-1.10.0rc2.dist-info → mlrun-1.10.0rc4.dist-info}/top_level.txt +0 -0
mlrun/common/runtimes/constants.py CHANGED
@@ -12,11 +12,8 @@
  # See the License for the specific language governing permissions and
  # limitations under the License.
 
- import enum
  import typing
 
- from deprecated import deprecated
- 
  import mlrun.common.constants as mlrun_constants
  import mlrun_pipelines.common.models
 
@@ -239,24 +236,6 @@ class RunStates:
          }[pipeline_run_status]
 
 
- # TODO: remove this class in 1.11.0 - use only MLRunInternalLabels
- @deprecated(
-     version="1.9.0",
-     reason="This class is deprecated and will be removed in 1.11.0. Use MLRunInternalLabels instead.",
-     category=FutureWarning,
- )
- class RunLabels(enum.Enum):
-     owner = mlrun_constants.MLRunInternalLabels.owner
-     v3io_user = mlrun_constants.MLRunInternalLabels.v3io_user
- 
-     @staticmethod
-     def all():
-         return [
-             RunLabels.owner,
-             RunLabels.v3io_user,
-         ]
- 
- 
  class SparkApplicationStates:
      """
      https://github.com/GoogleCloudPlatform/spark-on-k8s-operator/blob/master/pkg/apis/sparkoperator.k8s.io/v1beta2/types.go#L321
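
The removed RunLabels enum directed users to MLRunInternalLabels. A minimal sketch of the replacement, assuming nothing beyond the import alias and the two attribute names visible in the removed code:

```python
# The import alias and both attribute names appear verbatim in the removed enum.
import mlrun.common.constants as mlrun_constants

owner_label = mlrun_constants.MLRunInternalLabels.owner
v3io_user_label = mlrun_constants.MLRunInternalLabels.v3io_user
```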
mlrun/common/schemas/artifact.py CHANGED
@@ -15,7 +15,6 @@
  import typing
 
  import pydantic.v1
- from deprecated import deprecated
 
  import mlrun.common.types
 
@@ -26,6 +25,7 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
      model = "model"
      dataset = "dataset"
      document = "document"
+     llm_prompt = "llm-prompt"
      other = "other"
 
      # we define the link as a category to prevent import cycles, but it's not a real category
@@ -41,19 +41,27 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
              return [ArtifactCategories.dataset.value, link_kind], False
          if self.value == ArtifactCategories.document.value:
              return [ArtifactCategories.document.value, link_kind], False
+         if self.value == ArtifactCategories.llm_prompt.value:
+             return [ArtifactCategories.llm_prompt.value, link_kind], False
          if self.value == ArtifactCategories.other.value:
              return (
                  [
                      ArtifactCategories.model.value,
                      ArtifactCategories.dataset.value,
                      ArtifactCategories.document.value,
+                     ArtifactCategories.llm_prompt.value,
                  ],
                  True,
              )
 
      @classmethod
      def from_kind(cls, kind: str) -> "ArtifactCategories":
-         if kind in [cls.model.value, cls.dataset.value, cls.document.value]:
+         if kind in [
+             cls.model.value,
+             cls.dataset.value,
+             cls.document.value,
+             cls.llm_prompt.value,
+         ]:
              return cls(kind)
          return cls.other
 
@@ -64,6 +72,7 @@ class ArtifactCategories(mlrun.common.types.StrEnum):
              ArtifactCategories.model,
              ArtifactCategories.dataset,
              ArtifactCategories.document,
+             ArtifactCategories.llm_prompt,
          ]
 
 
@@ -78,16 +87,6 @@ class ArtifactIdentifier(pydantic.v1.BaseModel):
      # hash: typing.Optional[str]
 
 
- @deprecated(
-     version="1.7.0",
-     reason="mlrun.common.schemas.ArtifactsFormat is deprecated and will be removed in 1.9.0. "
-     "Use mlrun.common.formatters.ArtifactFormat instead.",
-     category=FutureWarning,
- )
- class ArtifactsFormat(mlrun.common.types.StrEnum):
-     full = "full"
- 
- 
  class ArtifactMetadata(pydantic.v1.BaseModel):
      key: str
      project: str
@@ -108,6 +107,7 @@ class ArtifactSpec(pydantic.v1.BaseModel):
      db_key: typing.Optional[str]
      extra_data: typing.Optional[dict[str, typing.Any]]
      unpackaging_instructions: typing.Optional[dict[str, typing.Any]]
+     parent_uri: typing.Optional[str]
 
      class Config:
          extra = pydantic.v1.Extra.allow
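
The hunks above make "llm-prompt" a first-class artifact category, with its own kind filter and a new parent_uri field on ArtifactSpec. A short sketch of what the from_kind logic shown here implies:

```python
from mlrun.common.schemas.artifact import ArtifactCategories

# "llm-prompt" now maps to its own category instead of the catch-all `other`
assert ArtifactCategories.from_kind("llm-prompt") == ArtifactCategories.llm_prompt

# unrecognized kinds still fall back to `other`
assert ArtifactCategories.from_kind("table") == ArtifactCategories.other
```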
mlrun/common/schemas/pipeline.py CHANGED
@@ -15,22 +15,6 @@
  import typing
 
  import pydantic.v1
- from deprecated import deprecated
- 
- import mlrun.common.types
- 
- 
- @deprecated(
-     version="1.7.0",
-     reason="mlrun.common.schemas.PipelinesFormat is deprecated and will be removed in 1.9.0. "
-     "Use mlrun.common.formatters.PipelineFormat instead.",
-     category=FutureWarning,
- )
- class PipelinesFormat(mlrun.common.types.StrEnum):
-     full = "full"
-     metadata_only = "metadata_only"
-     summary = "summary"
-     name_only = "name_only"
 
 
  class PipelinesPagination(str):
mlrun/common/schemas/project.py CHANGED
@@ -16,7 +16,6 @@ import datetime
  import typing
 
  import pydantic.v1
- from deprecated import deprecated
 
  import mlrun.common.types
 
@@ -24,22 +23,6 @@ from .common import ImageBuilder
  from .object import ObjectKind, ObjectStatus
 
 
- @deprecated(
-     version="1.7.0",
-     reason="mlrun.common.schemas.ProjectsFormat is deprecated and will be removed in 1.9.0. "
-     "Use mlrun.common.formatters.ProjectFormat instead.",
-     category=FutureWarning,
- )
- class ProjectsFormat(mlrun.common.types.StrEnum):
-     full = "full"
-     name_only = "name_only"
-     # minimal format removes large fields from the response (e.g. functions, workflows, artifacts)
-     # and is used for faster response times (in the UI)
-     minimal = "minimal"
-     # internal - allowed only in follower mode, only for the leader for upgrade purposes
-     leader = "leader"
- 
- 
  class ProjectMetadata(pydantic.v1.BaseModel):
      name: str
      created: typing.Optional[datetime.datetime] = None
mlrun/common/schemas/runs.py CHANGED
@@ -15,26 +15,9 @@
  import typing
 
  import pydantic.v1
- from deprecated import deprecated
- 
- import mlrun.common.types
 
 
  class RunIdentifier(pydantic.v1.BaseModel):
      kind: typing.Literal["run"] = "run"
      uid: typing.Optional[str]
      iter: typing.Optional[int]
- 
- 
- @deprecated(
-     version="1.7.0",
-     reason="mlrun.common.schemas.RunsFormat is deprecated and will be removed in 1.9.0. "
-     "Use mlrun.common.formatters.RunFormat instead.",
-     category=FutureWarning,
- )
- class RunsFormat(mlrun.common.types.StrEnum):
-     # No enrichment, data is pulled as-is from the database.
-     standard = "standard"
- 
-     # Performs run enrichment, including the run's artifacts. Only available for the `get` run API.
-     full = "full"
mlrun/config.py CHANGED
@@ -79,7 +79,7 @@ default_config = {
      # comma separated list of images that are in the specified images_registry, and therefore will be enriched with this
      # registry when used. default to mlrun/* which means any image which is of the mlrun repository (mlrun/mlrun,
      # mlrun/ml-base, etc...)
-     "images_to_enrich_registry": "^mlrun/*,python:3.9",
+     "images_to_enrich_registry": "^mlrun/*,^python:3.(9|11)$",
      "kfp_url": "",
      "kfp_ttl": "14400", # KFP ttl in sec, after that completed PODs will be deleted
      "kfp_image": "mlrun/mlrun-kfp", # image to use for KFP runner
@@ -94,7 +94,7 @@ default_config = {
      "default_base_image": "mlrun/mlrun", # default base image when doing .deploy()
      # template for project default image name. Parameter {name} will be replaced with project name
      "default_project_image_name": ".mlrun-project-image-{name}",
-     "default_project": "default", # default project name
+     "active_project": "", # active project name
      "default_archive": "", # default remote archive URL (for build tar.gz)
      "mpijob_crd_version": "", # mpijob crd version (e.g: "v1alpha1". must be in: mlrun.runtime.MPIJobCRDVersions)
      "ipython_widget": True,
@@ -286,7 +286,7 @@ default_config = {
          "remote": "mlrun/mlrun",
          "dask": "mlrun/ml-base",
          "mpijob": "mlrun/mlrun",
-         "application": "python:3.9",
+         "application": "python",
      },
      # see enrich_function_preemption_spec for more info,
      # and mlrun.common.schemas.function.PreemptionModes for available options
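
The new images_to_enrich_registry default anchors the python entry, so only python:3.9 and python:3.11 are treated as enrichable; the old unanchored python:3.9 also matched prefixed tags such as python:3.9-slim. A minimal sketch of the matching semantics (the real enrichment helper lives in MLRun's server code, not in this diff):

```python
import re

patterns = "^mlrun/*,^python:3.(9|11)$".split(",")  # comma separated regexes

def matches_enrichment(image: str) -> bool:
    return any(re.match(pattern, image) for pattern in patterns)

assert matches_enrichment("python:3.11")
assert matches_enrichment("mlrun/mlrun")
assert not matches_enrichment("python:3.10")      # not in the alternation
assert not matches_enrichment("python:3.9-slim")  # $ anchor now excludes tags
```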
mlrun/datastore/base.py CHANGED
@@ -104,10 +104,10 @@ class DataStore:
          """Whether the data store supports isdir"""
          return True
 
-     def _get_secret_or_env(self, key, default=None, prefix=None):
+     def _get_secret_or_env(self, key, default=None):
          # Project-secrets are mounted as env variables whose name can be retrieved from SecretsStore
          return mlrun.get_secret_or_env(
-             key, secret_provider=self._get_secret, default=default, prefix=prefix
+             key, secret_provider=self._get_secret, default=default
          )
 
      def get_storage_options(self):
mlrun/datastore/datastore.py CHANGED
@@ -54,7 +54,7 @@ def parse_url(url):
      return schema, endpoint, parsed_url
 
 
- def schema_to_store(schema):
+ def schema_to_store(schema) -> DataStore.__subclasses__():
      # import store classes inside to enable making their dependencies optional (package extras)
 
      if not schema or schema in get_local_file_schema():
mlrun/datastore/datastore_profile.py CHANGED
@@ -141,19 +141,11 @@ class ConfigProfile(DatastoreProfile):
  class DatastoreProfileKafkaTarget(DatastoreProfile):
      type: str = pydantic.v1.Field("kafka_target")
      _private_attributes = "kwargs_private"
-     brokers: typing.Optional[str] = None
+     brokers: str
      topic: str
      kwargs_public: typing.Optional[dict]
      kwargs_private: typing.Optional[dict]
 
-     def __init__(self, **kwargs):
-         super().__init__(**kwargs)
- 
-         if not self.brokers:
-             raise mlrun.errors.MLRunInvalidArgumentError(
-                 "DatastoreProfileKafkaTarget requires the 'brokers' field to be set"
-             )
- 
      def get_topic(self) -> typing.Optional[str]:
          return self.topic
 
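With brokers promoted to a required pydantic field, a missing value now fails at model validation rather than in the removed custom __init__. A sketch, assuming the name field comes from the DatastoreProfile base class:

```python
import pydantic.v1

from mlrun.datastore.datastore_profile import DatastoreProfileKafkaTarget

# valid: both required fields are supplied
profile = DatastoreProfileKafkaTarget(
    name="my-kafka", brokers="localhost:9092", topic="events"
)

# invalid: pydantic raises ValidationError instead of the removed
# MLRunInvalidArgumentError
try:
    DatastoreProfileKafkaTarget(name="bad-profile", topic="events")
except pydantic.v1.ValidationError as err:
    print(err)
```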
@@ -555,7 +547,7 @@ def datastore_profile_read(url, project_name="", secrets: typing.Optional[dict]
          url (str): A URL with 'ds' scheme pointing to the datastore profile
              (e.g., 'ds://profile-name').
          project_name (str, optional): The project name where the profile is stored.
-             Defaults to MLRun's default project.
+             Defaults to MLRun's active project.
          secrets (dict, optional): Dictionary containing secrets needed for profile retrieval.
 
      Returns:
@@ -580,7 +572,7 @@ def datastore_profile_read(url, project_name="", secrets: typing.Optional[dict]
      )
 
      profile_name = parsed_url.hostname
-     project_name = project_name or mlrun.mlconf.default_project
+     project_name = project_name or mlrun.mlconf.active_project
      datastore = TemporaryClientDatastoreProfiles().get(profile_name)
      if datastore:
          return datastore
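
Call sites are unchanged; only the fallback moved from mlrun.mlconf.default_project to mlrun.mlconf.active_project:

```python
from mlrun.datastore.datastore_profile import datastore_profile_read

# with no explicit project_name, resolution now falls back to
# mlrun.mlconf.active_project
profile = datastore_profile_read("ds://profile-name")
```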
mlrun/datastore/redis.py CHANGED
@@ -48,9 +48,8 @@ class RedisStore(DataStore):
              raise mlrun.errors.MLRunInvalidArgumentError(
                  "Provide Redis username and password only via secrets"
              )
-         credentials_prefix = self._get_secret_or_env("CREDENTIALS_PREFIX")
-         user = self._get_secret_or_env("REDIS_USER", "", credentials_prefix)
-         password = self._get_secret_or_env("REDIS_PASSWORD", "", credentials_prefix)
+         user = self._get_secret_or_env("REDIS_USER", "")
+         password = self._get_secret_or_env("REDIS_PASSWORD", "")
          host = parsed_endpoint.hostname
          port = parsed_endpoint.port if parsed_endpoint.port else redis_default_port
          schema = parsed_endpoint.scheme
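
With the CREDENTIALS_PREFIX indirection removed, Redis credentials resolve directly from project secrets or plain environment variables. A sketch using the same helper the store now calls:

```python
import os

import mlrun

# the store reads these keys as-is; there is no prefix lookup anymore
os.environ["REDIS_USER"] = "default"
os.environ["REDIS_PASSWORD"] = "s3cr3t"

user = mlrun.get_secret_or_env("REDIS_USER", default="")
password = mlrun.get_secret_or_env("REDIS_PASSWORD", default="")
```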
mlrun/datastore/sources.py CHANGED
@@ -768,7 +768,6 @@ class SnowflakeSource(BaseSourceDriver):
      :parameter url: URL of the snowflake cluster
      :parameter user: snowflake user
      :parameter database: snowflake database
-     :parameter schema: snowflake schema - deprecated, use db_schema
      :parameter db_schema: snowflake schema
      :parameter warehouse: snowflake warehouse
      """
@@ -790,18 +789,10 @@ class SnowflakeSource(BaseSourceDriver):
          url: Optional[str] = None,
          user: Optional[str] = None,
          database: Optional[str] = None,
-         schema: Optional[str] = None,
          db_schema: Optional[str] = None,
          warehouse: Optional[str] = None,
          **kwargs,
      ):
-         # TODO: Remove in 1.9.0
-         if schema:
-             warnings.warn(
-                 "schema is deprecated in 1.7.0, and will be removed in 1.9.0, please use db_schema"
-             )
-         db_schema = db_schema or schema  # TODO: Remove in 1.9.0
- 
          attributes = attributes or {}
          if url:
              attributes["url"] = url
mlrun/datastore/store_resources.py CHANGED
@@ -152,19 +152,19 @@ def get_store_resource(
          )
      elif kind == StorePrefix.FeatureSet:
          project, name, tag, uid = parse_versioned_object_uri(
-             uri, project or config.default_project
+             uri, project or config.active_project
          )
          return db.get_feature_set(name, project, tag, uid)
 
      elif kind == StorePrefix.FeatureVector:
          project, name, tag, uid = parse_versioned_object_uri(
-             uri, project or config.default_project
+             uri, project or config.active_project
          )
          return db.get_feature_vector(name, project, tag, uid)
 
      elif StorePrefix.is_artifact(kind):
          project, key, iteration, tag, tree, uid = parse_artifact_uri(
-             uri, project or config.default_project
+             uri, project or config.active_project
          )
          resource = db.read_artifact(
              key,
mlrun/datastore/storeytargets.py CHANGED
@@ -152,9 +152,7 @@ class KafkaStoreyTarget(storey.KafkaTarget):
              parsed.path.strip("/") if parsed.path else datastore_profile.get_topic()
          )
      else:
-         brokers = attributes.pop(
-             "brokers", attributes.pop("bootstrap_servers", None)
-         )
+         brokers = attributes.pop("brokers", None)
          topic, brokers = parse_kafka_url(path, brokers)
 
      if not topic:
@@ -175,8 +173,7 @@ class RedisNoSqlStoreyTarget(storey.NoSqlTarget):
      def __init__(self, *args, **kwargs):
          path = kwargs.pop("path")
          endpoint, uri = mlrun.datastore.targets.RedisNoSqlTarget.get_server_endpoint(
-             path,
-             kwargs.pop("credentials_prefix", None),
+             path
          )
          kwargs["path"] = endpoint + "/" + uri
          super().__init__(*args, **kwargs)
mlrun/datastore/targets.py CHANGED
@@ -17,7 +17,6 @@ import os
  import random
  import sys
  import time
- import warnings
  from collections import Counter
  from copy import copy
  from typing import Any, Optional, Union
@@ -409,7 +408,6 @@ class BaseStoreTarget(DataTargetBase):
          flush_after_seconds: Optional[int] = None,
          storage_options: Optional[dict[str, str]] = None,
          schema: Optional[dict[str, Any]] = None,
-         credentials_prefix=None,
      ):
          super().__init__(
              self.kind,
@@ -424,7 +422,6 @@ class BaseStoreTarget(DataTargetBase):
              max_events,
              flush_after_seconds,
              schema=schema,
-             credentials_prefix=credentials_prefix,
          )
 
          self.name = name or self.kind
@@ -440,13 +437,6 @@ class BaseStoreTarget(DataTargetBase):
          self.flush_after_seconds = flush_after_seconds
          self.storage_options = storage_options
          self.schema = schema or {}
-         self.credentials_prefix = credentials_prefix
-         if credentials_prefix:
-             warnings.warn(
-                 "The 'credentials_prefix' parameter is deprecated and will be removed in "
-                 "1.9.0. Please use datastore profiles instead.",
-                 FutureWarning,
-             )
 
          self._target = None
          self._resource = None
@@ -457,18 +447,11 @@ class BaseStoreTarget(DataTargetBase):
              key,
              secret_provider=self._secrets,
              default=default_value,
-             prefix=self.credentials_prefix,
          )
 
      def _get_store_and_path(self):
-         credentials_prefix_secrets = (
-             {"CREDENTIALS_PREFIX": self.credentials_prefix}
-             if self.credentials_prefix
-             else None
-         )
          store, resolved_store_path, url = mlrun.store_manager.get_or_create_store(
-             self.get_target_path(),
-             credentials_prefix_secrets,
+             self.get_target_path()
          )
          return store, resolved_store_path, url
 
@@ -621,7 +604,6 @@ class BaseStoreTarget(DataTargetBase):
          driver.path = spec.path
          driver.attributes = spec.attributes
          driver.schema = spec.schema
-         driver.credentials_prefix = spec.credentials_prefix
 
          if hasattr(spec, "columns"):
              driver.columns = spec.columns
@@ -638,7 +620,6 @@ class BaseStoreTarget(DataTargetBase):
          driver.max_events = spec.max_events
          driver.flush_after_seconds = spec.flush_after_seconds
          driver.storage_options = spec.storage_options
-         driver.credentials_prefix = spec.credentials_prefix
 
          driver._resource = resource
          driver.run_id = spec.run_id
@@ -720,7 +701,6 @@ class BaseStoreTarget(DataTargetBase):
          target.key_bucketing_number = self.key_bucketing_number
          target.partition_cols = self.partition_cols
          target.time_partitioning_granularity = self.time_partitioning_granularity
-         target.credentials_prefix = self.credentials_prefix
 
          self._resource.status.update_target(target)
          return target
@@ -1213,7 +1193,6 @@ class SnowflakeTarget(BaseStoreTarget):
          flush_after_seconds: Optional[int] = None,
          storage_options: Optional[dict[str, str]] = None,
          schema: Optional[dict[str, Any]] = None,
-         credentials_prefix=None,
          url: Optional[str] = None,
          user: Optional[str] = None,
          db_schema: Optional[str] = None,
@@ -1249,7 +1228,6 @@ class SnowflakeTarget(BaseStoreTarget):
              flush_after_seconds=flush_after_seconds,
              storage_options=storage_options,
              schema=schema,
-             credentials_prefix=credentials_prefix,
          )
 
      def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
@@ -1488,7 +1466,7 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
      writer_step_name = "RedisNoSqlTarget"
 
      @staticmethod
-     def get_server_endpoint(path, credentials_prefix=None):
+     def get_server_endpoint(path):
          endpoint, uri = parse_path(path)
          endpoint = endpoint or mlrun.mlconf.redis.url
          if endpoint.startswith("ds://"):
@@ -1506,15 +1484,8 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
              raise mlrun.errors.MLRunInvalidArgumentError(
                  "Provide Redis username and password only via secrets"
              )
-         credentials_prefix = credentials_prefix or mlrun.get_secret_or_env(
-             key="CREDENTIALS_PREFIX"
-         )
-         user = mlrun.get_secret_or_env(
-             "REDIS_USER", default="", prefix=credentials_prefix
-         )
-         password = mlrun.get_secret_or_env(
-             "REDIS_PASSWORD", default="", prefix=credentials_prefix
-         )
+         user = mlrun.get_secret_or_env("REDIS_USER", default="")
+         password = mlrun.get_secret_or_env("REDIS_PASSWORD", default="")
          host = parsed_endpoint.hostname
          port = parsed_endpoint.port if parsed_endpoint.port else "6379"
          scheme = parsed_endpoint.scheme
@@ -1528,9 +1499,7 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
          from storey import Table
          from storey.redis_driver import RedisDriver
 
-         endpoint, uri = self.get_server_endpoint(
-             self.get_target_path(), self.credentials_prefix
-         )
+         endpoint, uri = self.get_server_endpoint(self.get_target_path())
 
          return Table(
              uri,
@@ -1539,9 +1508,7 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
          )
 
      def get_spark_options(self, key_column=None, timestamp_key=None, overwrite=True):
-         endpoint, uri = self.get_server_endpoint(
-             self.get_target_path(), self.credentials_prefix
-         )
+         endpoint, uri = self.get_server_endpoint(self.get_target_path())
          parsed_endpoint = urlparse(endpoint)
          store, path_in_store, path = self._get_store_and_path()
          return {
@@ -1592,7 +1559,6 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
              class_name="mlrun.datastore.storeytargets.RedisNoSqlStoreyTarget",
              columns=column_list,
              table=table,
-             credentials_prefix=self.credentials_prefix,
              **self.attributes,
          )
 
@@ -1648,7 +1614,6 @@ class KafkaTarget(BaseStoreTarget):
      :param path: topic name e.g. "my_topic"
      :param after_step: optional, after what step in the graph to add the target
      :param columns: optional, which columns from data to write
-     :param bootstrap_servers: Deprecated. Use the brokers parameter instead
      :param producer_options: additional configurations for kafka producer
      :param brokers: kafka broker as represented by a host:port pair, or a list of kafka brokers, e.g.
          "localhost:9092", or ["kafka-broker-1:9092", "kafka-broker-2:9092"]
@@ -1664,27 +1629,12 @@ class KafkaTarget(BaseStoreTarget):
      def __init__(
          self,
          *args,
-         bootstrap_servers=None,
          producer_options=None,
          brokers=None,
          **kwargs,
      ):
          attrs = {}
 
-         # TODO: Remove this in 1.9.0
-         if bootstrap_servers:
-             if brokers:
-                 raise mlrun.errors.MLRunInvalidArgumentError(
-                     "KafkaTarget cannot be created with both the 'brokers' parameter and the deprecated "
-                     "'bootstrap_servers' parameter. Please use 'brokers' only."
-                 )
-             warnings.warn(
-                 "'bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in 1.9.0, "
-                 "use 'brokers' instead.",
-                 FutureWarning,
-             )
-             brokers = bootstrap_servers
- 
          if brokers:
              attrs["brokers"] = brokers
          if producer_options is not None:
@@ -2239,7 +2189,7 @@ def _get_target_path(driver, resource, run_id_mode=False, netloc=None, scheme=""
          else "vectors"
      )
      name = resource.metadata.name
-     project = resource.metadata.project or mlrun.mlconf.default_project
+     project = resource.metadata.project or mlrun.mlconf.active_project
 
      default_kind_name = kind
      if scheme == "ds":
mlrun/datastore/utils.py CHANGED
@@ -16,7 +16,6 @@ import math
  import tarfile
  import tempfile
  import typing
- import warnings
  from urllib.parse import parse_qs, urlparse
 
  import pandas as pd
@@ -171,16 +170,7 @@ def _generate_sql_query_with_time_filter(
  def get_kafka_brokers_from_dict(options: dict, pop=False) -> typing.Optional[str]:
      get_or_pop = options.pop if pop else options.get
      kafka_brokers = get_or_pop("kafka_brokers", None)
-     if kafka_brokers:
-         return kafka_brokers
-     kafka_bootstrap_servers = get_or_pop("kafka_bootstrap_servers", None)
-     if kafka_bootstrap_servers:
-         warnings.warn(
-             "The 'kafka_bootstrap_servers' parameter is deprecated and will be removed in "
-             "1.9.0. Please pass the 'kafka_brokers' parameter instead.",
-             FutureWarning,
-         )
-         return kafka_bootstrap_servers
+     return kafka_brokers
 
 
  def transform_list_filters_to_tuple(additional_filters):
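
get_kafka_brokers_from_dict is now a plain lookup of the kafka_brokers key; the deprecated kafka_bootstrap_servers key is silently ignored rather than warned about:

```python
from mlrun.datastore.utils import get_kafka_brokers_from_dict

assert get_kafka_brokers_from_dict({"kafka_brokers": "localhost:9092"}) == "localhost:9092"

# the old key no longer triggers a fallback (or a FutureWarning)
assert get_kafka_brokers_from_dict({"kafka_bootstrap_servers": "localhost:9092"}) is None
```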
mlrun/db/base.py CHANGED
@@ -185,6 +185,7 @@ class RunDBInterface(ABC):
          kind: Optional[str] = None,
          category: Union[str, mlrun.common.schemas.ArtifactCategories] = None,
          tree: Optional[str] = None,
+         parent: Optional[str] = None,
          format_: mlrun.common.formatters.ArtifactFormat = mlrun.common.formatters.ArtifactFormat.full,
          limit: Optional[int] = None,
          partition_by: Optional[
@@ -441,10 +442,10 @@ class RunDBInterface(ABC):
      ) -> dict:
          pass
 
-     # TODO: remove in 1.9.0
+     # TODO: remove in 1.10.0
      @deprecated(
-         version="1.9.0",
-         reason="'list_features' will be removed in 1.9.0, use 'list_features_v2' instead",
+         version="1.7.0",
+         reason="'list_features' will be removed in 1.10.0, use 'list_features_v2' instead",
          category=FutureWarning,
      )
      @abstractmethod
@@ -469,10 +470,10 @@ class RunDBInterface(ABC):
      ) -> mlrun.common.schemas.FeaturesOutputV2:
          pass
 
-     # TODO: remove in 1.9.0
+     # TODO: remove in 1.10.0
      @deprecated(
-         version="1.9.0",
-         reason="'list_entities' will be removed in 1.9.0, use 'list_entities_v2' instead",
+         version="1.7.0",
+         reason="'list_entities' will be removed in 1.10.0, use 'list_entities_v2' instead",
          category=FutureWarning,
      )
      @abstractmethod
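
Note the corrected deprecation metadata: the warnings have existed since 1.7.0, and the removal target moved to 1.10.0. A hedged sketch of migrating a call site (the list_features_v2 keyword arguments are assumptions; only the method name and FeaturesOutputV2 return type appear in this diff):

```python
import mlrun

db = mlrun.get_run_db()

# old: db.list_features(project="my-project")  -> emits a FutureWarning
# new: the v2 variant, returning FeaturesOutputV2
features = db.list_features_v2(project="my-project")
```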