mlrun 1.10.0rc1__py3-none-any.whl → 1.10.0rc3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic; further details are available in the package registry's advisory for this release.

Files changed (59)
  1. mlrun/__init__.py +2 -2
  2. mlrun/__main__.py +15 -4
  3. mlrun/artifacts/base.py +6 -6
  4. mlrun/artifacts/dataset.py +1 -1
  5. mlrun/artifacts/document.py +1 -1
  6. mlrun/artifacts/model.py +1 -1
  7. mlrun/artifacts/plots.py +2 -2
  8. mlrun/common/constants.py +7 -0
  9. mlrun/common/runtimes/constants.py +1 -1
  10. mlrun/common/schemas/__init__.py +1 -0
  11. mlrun/common/schemas/artifact.py +1 -1
  12. mlrun/common/schemas/pipeline.py +1 -1
  13. mlrun/common/schemas/project.py +1 -1
  14. mlrun/common/schemas/runs.py +1 -1
  15. mlrun/common/schemas/serving.py +17 -0
  16. mlrun/config.py +4 -4
  17. mlrun/datastore/datastore_profile.py +7 -57
  18. mlrun/datastore/sources.py +24 -16
  19. mlrun/datastore/store_resources.py +3 -3
  20. mlrun/datastore/targets.py +5 -5
  21. mlrun/datastore/utils.py +21 -6
  22. mlrun/db/base.py +7 -7
  23. mlrun/db/httpdb.py +88 -76
  24. mlrun/db/nopdb.py +1 -1
  25. mlrun/errors.py +29 -1
  26. mlrun/execution.py +9 -0
  27. mlrun/feature_store/common.py +5 -5
  28. mlrun/feature_store/feature_set.py +10 -6
  29. mlrun/feature_store/feature_vector.py +8 -6
  30. mlrun/launcher/base.py +1 -1
  31. mlrun/launcher/client.py +1 -1
  32. mlrun/lists.py +1 -1
  33. mlrun/model_monitoring/__init__.py +0 -1
  34. mlrun/model_monitoring/api.py +0 -44
  35. mlrun/model_monitoring/applications/evidently/base.py +57 -107
  36. mlrun/model_monitoring/controller.py +27 -14
  37. mlrun/model_monitoring/db/tsdb/v3io/v3io_connector.py +13 -5
  38. mlrun/model_monitoring/writer.py +1 -4
  39. mlrun/projects/operations.py +3 -3
  40. mlrun/projects/project.py +114 -52
  41. mlrun/render.py +5 -9
  42. mlrun/run.py +10 -10
  43. mlrun/runtimes/base.py +7 -7
  44. mlrun/runtimes/kubejob.py +2 -2
  45. mlrun/runtimes/nuclio/function.py +3 -3
  46. mlrun/runtimes/nuclio/serving.py +13 -23
  47. mlrun/runtimes/utils.py +25 -8
  48. mlrun/serving/__init__.py +5 -1
  49. mlrun/serving/server.py +39 -3
  50. mlrun/serving/states.py +176 -10
  51. mlrun/utils/helpers.py +10 -4
  52. mlrun/utils/version/version.json +2 -2
  53. {mlrun-1.10.0rc1.dist-info → mlrun-1.10.0rc3.dist-info}/METADATA +27 -15
  54. {mlrun-1.10.0rc1.dist-info → mlrun-1.10.0rc3.dist-info}/RECORD +58 -59
  55. {mlrun-1.10.0rc1.dist-info → mlrun-1.10.0rc3.dist-info}/WHEEL +1 -1
  56. mlrun/model_monitoring/tracking_policy.py +0 -124
  57. {mlrun-1.10.0rc1.dist-info → mlrun-1.10.0rc3.dist-info}/entry_points.txt +0 -0
  58. {mlrun-1.10.0rc1.dist-info → mlrun-1.10.0rc3.dist-info}/licenses/LICENSE +0 -0
  59. {mlrun-1.10.0rc1.dist-info → mlrun-1.10.0rc3.dist-info}/top_level.txt +0 -0
mlrun/__init__.py CHANGED
@@ -120,7 +120,7 @@ def set_environment(
120
120
  :param mock_functions: set to True to create local/mock functions instead of real containers,
121
121
  set to "auto" to auto determine based on the presence of k8s/Nuclio
122
122
  :returns:
123
- default project name
123
+ active project name
124
124
  actual artifact path/url, can be used to create subpaths per task or group of artifacts
125
125
  """
126
126
  if env_file:
@@ -161,7 +161,7 @@ def set_environment(
161
161
  )
162
162
  mlconf.artifact_path = artifact_path
163
163
 
164
- return mlconf.default_project, mlconf.artifact_path
164
+ return mlconf.active_project, mlconf.artifact_path
165
165
 
166
166
 
167
167
  def get_current_project(silent: bool = False) -> Optional[MlrunProject]:
mlrun/__main__.py CHANGED
@@ -13,6 +13,8 @@
13
13
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
14
14
  # See the License for the specific language governing permissions and
15
15
  # limitations under the License.
16
+ import functools
17
+ import importlib.metadata
16
18
  import json
17
19
  import pathlib
18
20
  import socket
@@ -25,12 +27,14 @@ from pprint import pprint
25
27
  import click
26
28
  import dotenv
27
29
  import pandas as pd
30
+ import semver
28
31
  import yaml
29
32
  from tabulate import tabulate
30
33
 
31
34
  import mlrun
32
35
  import mlrun.common.constants as mlrun_constants
33
36
  import mlrun.common.schemas
37
+ import mlrun.platforms
34
38
  import mlrun.utils.helpers
35
39
  from mlrun.common.helpers import parse_versioned_object_uri
36
40
  from mlrun.runtimes.mounts import auto_mount as auto_mount_modifier
@@ -63,12 +67,19 @@ from .utils.version import Version
63
67
  pd.set_option("mode.chained_assignment", None)
64
68
 
65
69
 
66
- def validate_base_argument(ctx, param, value):
70
+ def validate_base_argument(ctx: click.Context, param: click.Parameter, value: str):
71
+ # click 8.2 expects the context to be passed to make_metavar
72
+ if semver.VersionInfo.parse(
73
+ importlib.metadata.version("click")
74
+ ) < semver.VersionInfo.parse("8.2.0"):
75
+ metavar_func = functools.partial(param.make_metavar)
76
+ else:
77
+ metavar_func = functools.partial(param.make_metavar, ctx)
67
78
  if value and value.startswith("-"):
68
79
  raise click.BadParameter(
69
80
  f"{param.human_readable_name} ({value}) cannot start with '-', ensure the command options are typed "
70
81
  f"correctly. Preferably use '--' to separate options and arguments "
71
- f"e.g. 'mlrun run --option1 --option2 -- {param.make_metavar()} [--arg1|arg1] [--arg2|arg2]'",
82
+ f"e.g. 'mlrun run --option1 --option2 -- {metavar_func()} [--arg1|arg1] [--arg2|arg2]'",
72
83
  ctx=ctx,
73
84
  param=param,
74
85
  )
@@ -539,7 +550,7 @@ def build(
539
550
  exit(1)
540
551
 
541
552
  meta = func.metadata
542
- meta.project = project or meta.project or mlconf.default_project
553
+ meta.project = project or meta.project or mlconf.active_project
543
554
  meta.name = name or meta.name
544
555
  meta.tag = tag or meta.tag
545
556
 
@@ -859,7 +870,7 @@ def version():
859
870
  @main.command()
860
871
  @click.argument("uid", type=str)
861
872
  @click.option(
862
- "--project", "-p", help="project name (defaults to mlrun.mlconf.default_project)"
873
+ "--project", "-p", help="project name (defaults to mlrun.mlconf.active_project)"
863
874
  )
864
875
  @click.option("--offset", type=int, default=0, help="byte offset")
865
876
  @click.option("--db", help="api and db service path/url")
mlrun/artifacts/base.py CHANGED
@@ -219,7 +219,7 @@ class Artifact(ModelObj):
219
219
  project=None,
220
220
  src_path: typing.Optional[str] = None,
221
221
  # All params up until here are legacy params for compatibility with legacy artifacts.
222
- # TODO: remove them in 1.9.0.
222
+ # TODO: remove them in 1.10.0.
223
223
  metadata: ArtifactMetadata = None,
224
224
  spec: ArtifactSpec = None,
225
225
  ):
@@ -235,7 +235,7 @@ class Artifact(ModelObj):
235
235
  or src_path
236
236
  ):
237
237
  warnings.warn(
238
- "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
238
+ "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
239
239
  "Use the metadata and spec parameters instead.",
240
240
  DeprecationWarning,
241
241
  )
@@ -247,7 +247,7 @@ class Artifact(ModelObj):
247
247
 
248
248
  self.metadata.key = key or self.metadata.key
249
249
  self.metadata.project = (
250
- project or mlrun.mlconf.default_project or self.metadata.project
250
+ project or mlrun.mlconf.active_project or self.metadata.project
251
251
  )
252
252
  self.spec.size = size or self.spec.size
253
253
  self.spec.target_path = target_path or self.spec.target_path
@@ -758,13 +758,13 @@ class LinkArtifact(Artifact):
758
758
  link_tree=None,
759
759
  project=None,
760
760
  # All params up until here are legacy params for compatibility with legacy artifacts.
761
- # TODO: remove them in 1.9.0.
761
+ # TODO: remove them in 1.10.0.
762
762
  metadata: ArtifactMetadata = None,
763
763
  spec: LinkArtifactSpec = None,
764
764
  ):
765
765
  if key or target_path or link_iteration or link_key or link_tree or project:
766
766
  warnings.warn(
767
- "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
767
+ "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
768
768
  "Use the metadata and spec parameters instead.",
769
769
  DeprecationWarning,
770
770
  )
@@ -907,7 +907,7 @@ def convert_legacy_artifact_to_new_format(
907
907
  artifact_key = f"{artifact_key}:{artifact_tag}"
908
908
  # TODO: Remove once data migration v5 is obsolete
909
909
  warnings.warn(
910
- f"Converting legacy artifact '{artifact_key}' to new format. This will not be supported in MLRun 1.9.0. "
910
+ f"Converting legacy artifact '{artifact_key}' to new format. This will not be supported in MLRun 1.10.0. "
911
911
  f"Make sure to save the artifact/project in the new format.",
912
912
  FutureWarning,
913
913
  )
@@ -163,7 +163,7 @@ class DatasetArtifact(Artifact):
163
163
  ):
164
164
  if key or format or target_path:
165
165
  warnings.warn(
166
- "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
166
+ "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
167
167
  "Use the metadata and spec parameters instead.",
168
168
  DeprecationWarning,
169
169
  )
@@ -190,7 +190,7 @@ class MLRunLoader:
190
190
 
191
191
  # Resolve the producer
192
192
  if not self.producer:
193
- self.producer = mlrun.mlconf.default_project
193
+ self.producer = mlrun.mlconf.active_project
194
194
  if isinstance(self.producer, str):
195
195
  self.producer = mlrun.get_or_create_project(self.producer)
196
196
 
mlrun/artifacts/model.py CHANGED
@@ -152,7 +152,7 @@ class ModelArtifact(Artifact):
152
152
  ):
153
153
  if key or body or format or target_path:
154
154
  warnings.warn(
155
- "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
155
+ "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
156
156
  "Use the metadata and spec parameters instead.",
157
157
  DeprecationWarning,
158
158
  )
mlrun/artifacts/plots.py CHANGED
@@ -37,7 +37,7 @@ class PlotArtifact(Artifact):
37
37
  ):
38
38
  if key or body or is_inline or target_path:
39
39
  warnings.warn(
40
- "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
40
+ "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
41
41
  "Use the metadata and spec parameters instead.",
42
42
  DeprecationWarning,
43
43
  )
@@ -96,7 +96,7 @@ class PlotlyArtifact(Artifact):
96
96
  """
97
97
  if key or target_path:
98
98
  warnings.warn(
99
- "Artifact constructor parameters are deprecated and will be removed in 1.9.0. "
99
+ "Artifact constructor parameters are deprecated in 1.7.0 and will be removed in 1.10.0. "
100
100
  "Use the metadata and spec parameters instead.",
101
101
  DeprecationWarning,
102
102
  )
mlrun/common/constants.py CHANGED
@@ -90,6 +90,13 @@ class MLRunInternalLabels:
90
90
  if not key.startswith("__") and isinstance(value, str)
91
91
  ]
92
92
 
93
+ @staticmethod
94
+ def default_run_labels_to_enrich():
95
+ return [
96
+ MLRunInternalLabels.owner,
97
+ MLRunInternalLabels.v3io_user,
98
+ ]
99
+
93
100
 
94
101
  class DeployStatusTextKind(mlrun.common.types.StrEnum):
95
102
  logs = "logs"
@@ -237,7 +237,7 @@ class RunStates:
237
237
  }[pipeline_run_status]
238
238
 
239
239
 
240
- # TODO: remove this class in 1.9.0 - use only MlrunInternalLabels
240
+ # TODO: remove this class in 1.10.0 - use only MlrunInternalLabels
241
241
  class RunLabels(enum.Enum):
242
242
  owner = mlrun_constants.MLRunInternalLabels.owner
243
243
  v3io_user = mlrun_constants.MLRunInternalLabels.v3io_user
@@ -214,6 +214,7 @@ from .secret import (
214
214
  SecretsData,
215
215
  UserSecretCreationRequest,
216
216
  )
217
+ from .serving import ModelRunnerStepData, MonitoringData
217
218
  from .tag import Tag, TagObjects
218
219
  from .workflow import (
219
220
  GetWorkflowResponse,
@@ -80,7 +80,7 @@ class ArtifactIdentifier(pydantic.v1.BaseModel):
80
80
 
81
81
  @deprecated(
82
82
  version="1.7.0",
83
- reason="mlrun.common.schemas.ArtifactsFormat is deprecated and will be removed in 1.9.0. "
83
+ reason="mlrun.common.schemas.ArtifactsFormat is deprecated and will be removed in 1.10.0. "
84
84
  "Use mlrun.common.formatters.ArtifactFormat instead.",
85
85
  category=FutureWarning,
86
86
  )
@@ -22,7 +22,7 @@ import mlrun.common.types
22
22
 
23
23
  @deprecated(
24
24
  version="1.7.0",
25
- reason="mlrun.common.schemas.PipelinesFormat is deprecated and will be removed in 1.9.0. "
25
+ reason="mlrun.common.schemas.PipelinesFormat is deprecated and will be removed in 1.10.0. "
26
26
  "Use mlrun.common.formatters.PipelineFormat instead.",
27
27
  category=FutureWarning,
28
28
  )
@@ -26,7 +26,7 @@ from .object import ObjectKind, ObjectStatus
26
26
 
27
27
  @deprecated(
28
28
  version="1.7.0",
29
- reason="mlrun.common.schemas.ProjectsFormat is deprecated and will be removed in 1.9.0. "
29
+ reason="mlrun.common.schemas.ProjectsFormat is deprecated and will be removed in 1.10.0. "
30
30
  "Use mlrun.common.formatters.ProjectFormat instead.",
31
31
  category=FutureWarning,
32
32
  )
@@ -28,7 +28,7 @@ class RunIdentifier(pydantic.v1.BaseModel):
28
28
 
29
29
  @deprecated(
30
30
  version="1.7.0",
31
- reason="mlrun.common.schemas.RunsFormat is deprecated and will be removed in 1.9.0. "
31
+ reason="mlrun.common.schemas.RunsFormat is deprecated and will be removed in 1.10.0. "
32
32
  "Use mlrun.common.formatters.RunFormat instead.",
33
33
  category=FutureWarning,
34
34
  )
@@ -14,9 +14,26 @@
14
14
 
15
15
  from pydantic.v1 import BaseModel
16
16
 
17
+ from mlrun.common.types import StrEnum
18
+
17
19
  from .background_task import BackgroundTaskList
18
20
 
19
21
 
20
22
  class DeployResponse(BaseModel):
21
23
  data: dict
22
24
  background_tasks: BackgroundTaskList
25
+
26
+
27
+ class ModelRunnerStepData(StrEnum):
28
+ MODELS = "models"
29
+ MONITORING_DATA = "monitoring_data"
30
+
31
+
32
+ class MonitoringData(StrEnum):
33
+ INPUTS = "inputs"
34
+ OUTPUTS = "outputs"
35
+ INPUT_PATH = "input_path"
36
+ CREATION_STRATEGY = "creation_strategy"
37
+ LABELS = "labels"
38
+ MODEL_PATH = "model_path"
39
+ MODEL_ENDPOINT_UID = "model_endpoint_uid"
mlrun/config.py CHANGED
@@ -79,7 +79,7 @@ default_config = {
79
79
  # comma separated list of images that are in the specified images_registry, and therefore will be enriched with this
80
80
  # registry when used. default to mlrun/* which means any image which is of the mlrun repository (mlrun/mlrun,
81
81
  # mlrun/ml-base, etc...)
82
- "images_to_enrich_registry": "^mlrun/*,python:3.9",
82
+ "images_to_enrich_registry": "^mlrun/*,^python:3.(9|11)$",
83
83
  "kfp_url": "",
84
84
  "kfp_ttl": "14400", # KFP ttl in sec, after that completed PODs will be deleted
85
85
  "kfp_image": "mlrun/mlrun-kfp", # image to use for KFP runner
@@ -94,7 +94,7 @@ default_config = {
94
94
  "default_base_image": "mlrun/mlrun", # default base image when doing .deploy()
95
95
  # template for project default image name. Parameter {name} will be replaced with project name
96
96
  "default_project_image_name": ".mlrun-project-image-{name}",
97
- "default_project": "default", # default project name
97
+ "active_project": "", # active project name
98
98
  "default_archive": "", # default remote archive URL (for build tar.gz)
99
99
  "mpijob_crd_version": "", # mpijob crd version (e.g: "v1alpha1". must be in: mlrun.runtime.MPIJobCRDVersions)
100
100
  "ipython_widget": True,
@@ -286,7 +286,7 @@ default_config = {
286
286
  "remote": "mlrun/mlrun",
287
287
  "dask": "mlrun/ml-base",
288
288
  "mpijob": "mlrun/mlrun",
289
- "application": "python:3.9",
289
+ "application": "python",
290
290
  },
291
291
  # see enrich_function_preemption_spec for more info,
292
292
  # and mlrun.common.schemas.function.PreemptionModes for available options
@@ -482,7 +482,7 @@ default_config = {
482
482
  "project_owners_cache_ttl": "30 seconds",
483
483
  # access key to be used when the leader is iguazio and polling is done from it
484
484
  "iguazio_access_key": "",
485
- "iguazio_list_projects_default_page_size": 200,
485
+ "iguazio_list_projects_default_page_size": 500,
486
486
  "iguazio_client_job_cache_ttl": "20 minutes",
487
487
  "nuclio_project_deletion_verification_timeout": "300 seconds",
488
488
  "nuclio_project_deletion_verification_interval": "5 seconds",
@@ -16,7 +16,6 @@ import ast
16
16
  import base64
17
17
  import json
18
18
  import typing
19
- import warnings
20
19
  from urllib.parse import ParseResult, urlparse
21
20
 
22
21
  import pydantic.v1
@@ -142,7 +141,6 @@ class ConfigProfile(DatastoreProfile):
142
141
  class DatastoreProfileKafkaTarget(DatastoreProfile):
143
142
  type: str = pydantic.v1.Field("kafka_target")
144
143
  _private_attributes = "kwargs_private"
145
- bootstrap_servers: typing.Optional[str] = None
146
144
  brokers: typing.Optional[str] = None
147
145
  topic: str
148
146
  kwargs_public: typing.Optional[dict]
@@ -151,31 +149,16 @@ class DatastoreProfileKafkaTarget(DatastoreProfile):
151
149
  def __init__(self, **kwargs):
152
150
  super().__init__(**kwargs)
153
151
 
154
- if not self.brokers and not self.bootstrap_servers:
152
+ if not self.brokers:
155
153
  raise mlrun.errors.MLRunInvalidArgumentError(
156
154
  "DatastoreProfileKafkaTarget requires the 'brokers' field to be set"
157
155
  )
158
156
 
159
- if self.bootstrap_servers:
160
- if self.brokers:
161
- raise mlrun.errors.MLRunInvalidArgumentError(
162
- "DatastoreProfileKafkaTarget cannot be created with both 'brokers' and 'bootstrap_servers'"
163
- )
164
- else:
165
- self.brokers = self.bootstrap_servers
166
- self.bootstrap_servers = None
167
- warnings.warn(
168
- "'bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in 1.9.0, "
169
- "use 'brokers' instead.",
170
- # TODO: Remove this in 1.9.0
171
- FutureWarning,
172
- )
173
-
174
157
  def get_topic(self) -> typing.Optional[str]:
175
158
  return self.topic
176
159
 
177
160
  def attributes(self):
178
- attributes = {"brokers": self.brokers or self.bootstrap_servers}
161
+ attributes = {"brokers": self.brokers}
179
162
  if self.kwargs_public:
180
163
  attributes = merge(attributes, self.kwargs_public)
181
164
  if self.kwargs_private:
@@ -248,18 +231,7 @@ class DatastoreProfileS3(DatastoreProfile):
248
231
  assume_role_arn: typing.Optional[str] = None
249
232
  access_key_id: typing.Optional[str] = None
250
233
  secret_key: typing.Optional[str] = None
251
- bucket: typing.Optional[str] = None
252
-
253
- @pydantic.v1.validator("bucket")
254
- @classmethod
255
- def check_bucket(cls, v):
256
- if not v:
257
- warnings.warn(
258
- "The 'bucket' attribute will be mandatory starting from version 1.9",
259
- FutureWarning,
260
- stacklevel=2,
261
- )
262
- return v
234
+ bucket: str
263
235
 
264
236
  def secrets(self) -> dict:
265
237
  res = {}
@@ -353,18 +325,7 @@ class DatastoreProfileGCS(DatastoreProfile):
353
325
  _private_attributes = ("gcp_credentials",)
354
326
  credentials_path: typing.Optional[str] = None # path to file.
355
327
  gcp_credentials: typing.Optional[typing.Union[str, dict]] = None
356
- bucket: typing.Optional[str] = None
357
-
358
- @pydantic.v1.validator("bucket")
359
- @classmethod
360
- def check_bucket(cls, v):
361
- if not v:
362
- warnings.warn(
363
- "The 'bucket' attribute will be mandatory starting from version 1.9",
364
- FutureWarning,
365
- stacklevel=2,
366
- )
367
- return v
328
+ bucket: str
368
329
 
369
330
  @pydantic.v1.validator("gcp_credentials", pre=True, always=True)
370
331
  @classmethod
@@ -410,18 +371,7 @@ class DatastoreProfileAzureBlob(DatastoreProfile):
410
371
  client_secret: typing.Optional[str] = None
411
372
  sas_token: typing.Optional[str] = None
412
373
  credential: typing.Optional[str] = None
413
- container: typing.Optional[str] = None
414
-
415
- @pydantic.v1.validator("container")
416
- @classmethod
417
- def check_container(cls, v):
418
- if not v:
419
- warnings.warn(
420
- "The 'container' attribute will be mandatory starting from version 1.9",
421
- FutureWarning,
422
- stacklevel=2,
423
- )
424
- return v
374
+ container: str
425
375
 
426
376
  def url(self, subpath) -> str:
427
377
  if subpath.startswith("/"):
@@ -605,7 +555,7 @@ def datastore_profile_read(url, project_name="", secrets: typing.Optional[dict]
605
555
  url (str): A URL with 'ds' scheme pointing to the datastore profile
606
556
  (e.g., 'ds://profile-name').
607
557
  project_name (str, optional): The project name where the profile is stored.
608
- Defaults to MLRun's default project.
558
+ Defaults to MLRun's active project.
609
559
  secrets (dict, optional): Dictionary containing secrets needed for profile retrieval.
610
560
 
611
561
  Returns:
@@ -630,7 +580,7 @@ def datastore_profile_read(url, project_name="", secrets: typing.Optional[dict]
630
580
  )
631
581
 
632
582
  profile_name = parsed_url.hostname
633
- project_name = project_name or mlrun.mlconf.default_project
583
+ project_name = project_name or mlrun.mlconf.active_project
634
584
  datastore = TemporaryClientDatastoreProfiles().get(profile_name)
635
585
  if datastore:
636
586
  return datastore
@@ -11,6 +11,7 @@
11
11
  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
12
  # See the License for the specific language governing permissions and
13
13
  # limitations under the License.
14
+
14
15
  import json
15
16
  import operator
16
17
  import os
@@ -18,7 +19,7 @@ import warnings
18
19
  from base64 import b64encode
19
20
  from copy import copy
20
21
  from datetime import datetime
21
- from typing import Any, Optional, Union
22
+ from typing import Any, Literal, Optional, Union
22
23
 
23
24
  import pandas as pd
24
25
  import semver
@@ -794,12 +795,12 @@ class SnowflakeSource(BaseSourceDriver):
794
795
  warehouse: Optional[str] = None,
795
796
  **kwargs,
796
797
  ):
797
- # TODO: Remove in 1.9.0
798
+ # TODO: Remove in 1.10.0
798
799
  if schema:
799
800
  warnings.warn(
800
- "schema is deprecated in 1.7.0, and will be removed in 1.9.0, please use db_schema"
801
+ "schema is deprecated in 1.7.0, and will be removed in 1.10.0, please use db_schema"
801
802
  )
802
- db_schema = db_schema or schema # TODO: Remove in 1.9.0
803
+ db_schema = db_schema or schema # TODO: Remove in 1.10.0
803
804
 
804
805
  attributes = attributes or {}
805
806
  if url:
@@ -1063,16 +1064,17 @@ class KafkaSource(OnlineSource):
1063
1064
 
1064
1065
  def __init__(
1065
1066
  self,
1066
- brokers=None,
1067
- topics=None,
1068
- group="serving",
1069
- initial_offset="earliest",
1070
- partitions=None,
1071
- sasl_user=None,
1072
- sasl_pass=None,
1073
- attributes=None,
1067
+ brokers: Optional[list[str]] = None,
1068
+ topics: Optional[list[str]] = None,
1069
+ group: str = "serving",
1070
+ initial_offset: Literal["earliest", "latest"] = "earliest",
1071
+ partitions: Optional[list[int]] = None,
1072
+ sasl_user: Optional[str] = None,
1073
+ sasl_pass: Optional[str] = None,
1074
+ tls_enable: Optional[bool] = None,
1075
+ attributes: Optional[dict] = None,
1074
1076
  **kwargs,
1075
- ):
1077
+ ) -> None:
1076
1078
  """Sets kafka source for the flow
1077
1079
 
1078
1080
  :param brokers: list of broker IP addresses
@@ -1082,6 +1084,7 @@ class KafkaSource(OnlineSource):
1082
1084
  :param partitions: Optional, A list of partitions numbers for which the function receives events.
1083
1085
  :param sasl_user: Optional, user name to use for sasl authentications
1084
1086
  :param sasl_pass: Optional, password to use for sasl authentications
1087
+ :param tls_enable: Optional, if set - whether to enable TLS or not.
1085
1088
  :param attributes: Optional, extra attributes to be passed to kafka trigger
1086
1089
  """
1087
1090
  if isinstance(topics, str):
@@ -1095,10 +1098,15 @@ class KafkaSource(OnlineSource):
1095
1098
  attributes["initial_offset"] = initial_offset
1096
1099
  if partitions is not None:
1097
1100
  attributes["partitions"] = partitions
1098
- if sasl := mlrun.datastore.utils.KafkaParameters(attributes).sasl(
1099
- usr=sasl_user, pwd=sasl_pass
1100
- ):
1101
+
1102
+ kafka_params = mlrun.datastore.utils.KafkaParameters(attributes)
1103
+
1104
+ if sasl := kafka_params.sasl(usr=sasl_user, pwd=sasl_pass):
1101
1105
  attributes["sasl"] = sasl
1106
+
1107
+ if tls := kafka_params.tls(tls_enable=tls_enable):
1108
+ attributes["tls"] = tls
1109
+
1102
1110
  super().__init__(attributes=attributes, **kwargs)
1103
1111
 
1104
1112
  def to_dataframe(
@@ -152,19 +152,19 @@ def get_store_resource(
152
152
  )
153
153
  elif kind == StorePrefix.FeatureSet:
154
154
  project, name, tag, uid = parse_versioned_object_uri(
155
- uri, project or config.default_project
155
+ uri, project or config.active_project
156
156
  )
157
157
  return db.get_feature_set(name, project, tag, uid)
158
158
 
159
159
  elif kind == StorePrefix.FeatureVector:
160
160
  project, name, tag, uid = parse_versioned_object_uri(
161
- uri, project or config.default_project
161
+ uri, project or config.active_project
162
162
  )
163
163
  return db.get_feature_vector(name, project, tag, uid)
164
164
 
165
165
  elif StorePrefix.is_artifact(kind):
166
166
  project, key, iteration, tag, tree, uid = parse_artifact_uri(
167
- uri, project or config.default_project
167
+ uri, project or config.active_project
168
168
  )
169
169
  resource = db.read_artifact(
170
170
  key,
@@ -443,8 +443,8 @@ class BaseStoreTarget(DataTargetBase):
443
443
  self.credentials_prefix = credentials_prefix
444
444
  if credentials_prefix:
445
445
  warnings.warn(
446
- "The 'credentials_prefix' parameter is deprecated and will be removed in "
447
- "1.9.0. Please use datastore profiles instead.",
446
+ "The 'credentials_prefix' parameter is deprecated in 1.7.0 and will be removed in "
447
+ "1.10.0. Please use datastore profiles instead.",
448
448
  FutureWarning,
449
449
  )
450
450
 
@@ -1671,7 +1671,7 @@ class KafkaTarget(BaseStoreTarget):
1671
1671
  ):
1672
1672
  attrs = {}
1673
1673
 
1674
- # TODO: Remove this in 1.9.0
1674
+ # TODO: Remove this in 1.10.0
1675
1675
  if bootstrap_servers:
1676
1676
  if brokers:
1677
1677
  raise mlrun.errors.MLRunInvalidArgumentError(
@@ -1679,7 +1679,7 @@ class KafkaTarget(BaseStoreTarget):
1679
1679
  "'bootstrap_servers' parameter. Please use 'brokers' only."
1680
1680
  )
1681
1681
  warnings.warn(
1682
- "'bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in 1.9.0, "
1682
+ "'bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in 1.10.0, "
1683
1683
  "use 'brokers' instead.",
1684
1684
  FutureWarning,
1685
1685
  )
@@ -2239,7 +2239,7 @@ def _get_target_path(driver, resource, run_id_mode=False, netloc=None, scheme=""
2239
2239
  else "vectors"
2240
2240
  )
2241
2241
  name = resource.metadata.name
2242
- project = resource.metadata.project or mlrun.mlconf.default_project
2242
+ project = resource.metadata.project or mlrun.mlconf.active_project
2243
2243
 
2244
2244
  default_kind_name = kind
2245
2245
  if scheme == "ds":
mlrun/datastore/utils.py CHANGED
@@ -176,8 +176,8 @@ def get_kafka_brokers_from_dict(options: dict, pop=False) -> typing.Optional[str
176
176
  kafka_bootstrap_servers = get_or_pop("kafka_bootstrap_servers", None)
177
177
  if kafka_bootstrap_servers:
178
178
  warnings.warn(
179
- "The 'kafka_bootstrap_servers' parameter is deprecated and will be removed in "
180
- "1.9.0. Please pass the 'kafka_brokers' parameter instead.",
179
+ "The 'kafka_bootstrap_servers' parameter is deprecated in 1.7.0 and will be removed in "
180
+ "1.10.0. Please pass the 'kafka_brokers' parameter instead.",
181
181
  FutureWarning,
182
182
  )
183
183
  return kafka_bootstrap_servers
@@ -246,6 +246,9 @@ class KafkaParameters:
246
246
  "partitions": "",
247
247
  "sasl": "",
248
248
  "worker_allocation_mode": "",
249
+ "tls_enable": "", # for Nuclio with Confluent Kafka (Sarama client)
250
+ "tls": "",
251
+ "new_topic": "",
249
252
  }
250
253
  self._reference_dicts = (
251
254
  self._custom_attributes,
@@ -270,7 +273,9 @@ class KafkaParameters:
270
273
  }
271
274
  if sasl := self._kwargs.get("sasl"):
272
275
  res |= {
273
- "security_protocol": "SASL_PLAINTEXT",
276
+ "security_protocol": self._kwargs.get(
277
+ "security_protocol", "SASL_PLAINTEXT"
278
+ ),
274
279
  "sasl_mechanism": sasl["mechanism"],
275
280
  "sasl_plain_username": sasl["user"],
276
281
  "sasl_plain_password": sasl["password"],
@@ -288,15 +293,25 @@ class KafkaParameters:
288
293
 
289
294
  def sasl(
290
295
  self, *, usr: typing.Optional[str] = None, pwd: typing.Optional[str] = None
291
- ) -> dict:
292
- usr = usr or self._kwargs.get("sasl_plain_username", None)
293
- pwd = pwd or self._kwargs.get("sasl_plain_password", None)
296
+ ) -> dict[str, typing.Union[str, bool]]:
294
297
  res = self._kwargs.get("sasl", {})
298
+ usr = usr or self._kwargs.get("sasl_plain_username")
299
+ pwd = pwd or self._kwargs.get("sasl_plain_password")
295
300
  if usr and pwd:
296
301
  res["enable"] = True
297
302
  res["user"] = usr
298
303
  res["password"] = pwd
299
304
  res["mechanism"] = self._kwargs.get("sasl_mechanism", "PLAIN")
305
+ res["handshake"] = self._kwargs.get("sasl_handshake", True)
306
+ return res
307
+
308
+ def tls(self, *, tls_enable: typing.Optional[bool] = None) -> dict[str, bool]:
309
+ res = self._kwargs.get("tls", {})
310
+ tls_enable = (
311
+ tls_enable if tls_enable is not None else self._kwargs.get("tls_enable")
312
+ )
313
+ if tls_enable:
314
+ res["enable"] = tls_enable
300
315
  return res
301
316
 
302
317
  def valid_entries_only(self, input_dict: dict) -> dict: