mlrun 1.5.0rc4__py3-none-any.whl → 1.5.0rc6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. mlrun/api/api/endpoints/datastore_profile.py +35 -13
  2. mlrun/api/api/endpoints/files.py +1 -1
  3. mlrun/api/api/endpoints/frontend_spec.py +1 -10
  4. mlrun/api/api/endpoints/functions.py +28 -18
  5. mlrun/api/api/endpoints/hub.py +2 -6
  6. mlrun/api/api/endpoints/pipelines.py +5 -1
  7. mlrun/api/api/endpoints/projects.py +1 -0
  8. mlrun/api/api/endpoints/workflows.py +1 -0
  9. mlrun/api/api/utils.py +18 -0
  10. mlrun/api/crud/client_spec.py +3 -0
  11. mlrun/api/crud/datastore_profiles.py +2 -2
  12. mlrun/api/crud/hub.py +158 -142
  13. mlrun/api/crud/model_monitoring/deployment.py +3 -0
  14. mlrun/api/crud/model_monitoring/model_endpoints.py +1 -1
  15. mlrun/api/crud/pipelines.py +10 -4
  16. mlrun/api/crud/workflows.py +11 -4
  17. mlrun/api/db/session.py +7 -2
  18. mlrun/api/db/sqldb/db.py +19 -21
  19. mlrun/api/db/sqldb/models/models_mysql.py +10 -1
  20. mlrun/api/db/sqldb/models/models_sqlite.py +11 -1
  21. mlrun/api/initial_data.py +3 -5
  22. mlrun/api/launcher.py +2 -1
  23. mlrun/api/migrations_mysql/versions/026c947c4487_altering_table_datastore_profiles_2.py +46 -0
  24. mlrun/api/migrations_sqlite/versions/026c947c4487_altering_table_datastore_profiles_2.py +46 -0
  25. mlrun/api/rundb/sqldb.py +113 -61
  26. mlrun/api/utils/db/sqlite_migration.py +1 -0
  27. mlrun/common/model_monitoring/helpers.py +3 -1
  28. mlrun/common/schemas/client_spec.py +1 -0
  29. mlrun/common/schemas/datastore_profile.py +1 -1
  30. mlrun/common/schemas/frontend_spec.py +1 -1
  31. mlrun/config.py +3 -2
  32. mlrun/datastore/datastore_profile.py +33 -21
  33. mlrun/datastore/dbfs_store.py +9 -8
  34. mlrun/datastore/redis.py +6 -0
  35. mlrun/datastore/targets.py +12 -1
  36. mlrun/db/base.py +1 -1
  37. mlrun/db/factory.py +3 -0
  38. mlrun/db/httpdb.py +14 -13
  39. mlrun/db/nopdb.py +1 -1
  40. mlrun/feature_store/api.py +4 -1
  41. mlrun/feature_store/feature_set.py +3 -1
  42. mlrun/feature_store/ingestion.py +1 -0
  43. mlrun/kfpops.py +8 -2
  44. mlrun/launcher/base.py +1 -1
  45. mlrun/model.py +7 -5
  46. mlrun/projects/pipelines.py +7 -6
  47. mlrun/projects/project.py +2 -2
  48. mlrun/run.py +1 -1
  49. mlrun/runtimes/__init__.py +1 -0
  50. mlrun/utils/helpers.py +1 -1
  51. mlrun/utils/notifications/notification/webhook.py +9 -1
  52. mlrun/utils/version/version.json +2 -2
  53. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/METADATA +6 -5
  54. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/RECORD +58 -56
  55. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/LICENSE +0 -0
  56. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/WHEEL +0 -0
  57. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/entry_points.txt +0 -0
  58. {mlrun-1.5.0rc4.dist-info → mlrun-1.5.0rc6.dist-info}/top_level.txt +0 -0
@@ -19,5 +19,5 @@ from pydantic import BaseModel
19
19
  class DatastoreProfile(BaseModel):
20
20
  name: str
21
21
  type: str
22
- body: str
22
+ object: str
23
23
  project: str
@@ -60,7 +60,7 @@ class FrontendSpec(pydantic.BaseModel):
60
60
  function_deployment_target_image_template: typing.Optional[str]
61
61
  function_deployment_target_image_name_prefix_template: str
62
62
  function_deployment_target_image_registries_to_enforce_prefix: typing.List[str] = []
63
- function_deployment_mlrun_command: typing.Optional[str]
63
+ function_deployment_mlrun_requirement: typing.Optional[str]
64
64
  auto_mount_type: typing.Optional[str]
65
65
  auto_mount_params: typing.Dict[str, str] = {}
66
66
  default_artifact_path: str
mlrun/config.py CHANGED
@@ -453,7 +453,8 @@ default_config = {
453
453
  "data_prefixes": {
454
454
  "default": "v3io:///projects/{project}/FeatureStore/{name}/{kind}",
455
455
  "nosql": "v3io:///projects/{project}/FeatureStore/{name}/{kind}",
456
- "redisnosql": "redis:///projects/{project}/FeatureStore/{name}/{kind}",
456
+ # "authority" is optional and generalizes [userinfo "@"] host [":" port]
457
+ "redisnosql": "redis://{authority}/projects/{project}/FeatureStore/{name}/{kind}",
457
458
  },
458
459
  "default_targets": "parquet,nosql",
459
460
  "default_job_image": "mlrun/mlrun",
@@ -1010,7 +1011,7 @@ class Config:
1010
1011
  if artifact_path[-1] != "/":
1011
1012
  artifact_path += "/"
1012
1013
 
1013
- return mlrun.utils.helpers.fill_artifact_path_template(
1014
+ return mlrun.utils.helpers.fill_project_path_template(
1014
1015
  artifact_path=artifact_path + file_path, project=project
1015
1016
  )
1016
1017
 
@@ -26,18 +26,10 @@ import mlrun.errors
26
26
  from ..secrets import get_secret_or_env
27
27
 
28
28
 
29
- class PrivateValue(pydantic.BaseModel):
30
- value: str
31
-
32
- def get(self):
33
- if self.value == "None":
34
- return None
35
- return ast.literal_eval(self.value)["value"]
36
-
37
-
38
29
  class DatastoreProfile(pydantic.BaseModel):
39
30
  type: str
40
31
  name: str
32
+ _private_attributes: typing.List = ()
41
33
 
42
34
  @pydantic.validator("name")
43
35
  def lower_case(cls, v):
@@ -58,21 +50,19 @@ class DatastoreProfile(pydantic.BaseModel):
58
50
 
59
51
  class DatastoreProfileRedis(DatastoreProfile):
60
52
  type: str = pydantic.Field("redis")
53
+ _private_attributes = ("username", "password")
61
54
  endpoint_url: str
62
- username: typing.Optional[PrivateValue]
63
- password: typing.Optional[PrivateValue]
64
-
65
- @pydantic.validator("username", "password", pre=True)
66
- def convert_to_private(cls, v):
67
- return PrivateValue(value=v)
55
+ username: typing.Optional[str] = None
56
+ password: typing.Optional[str] = None
68
57
 
69
58
  def is_secured(self):
70
59
  return self.endpoint_url.startswith("rediss://")
71
60
 
72
61
  def url_with_credentials(self):
73
62
  parsed_url = urlparse(self.endpoint_url)
74
- username = self.username.get() if self.username else None
75
- password = self.password.get() if self.password else None
63
+ username = self.username
64
+ password = self.password
65
+ netloc = parsed_url.hostname
76
66
  if username:
77
67
  if password:
78
68
  netloc = f"{username}:{password}@{parsed_url.hostname}"
@@ -106,13 +96,21 @@ class DatastoreProfile2Json(pydantic.BaseModel):
106
96
  @staticmethod
107
97
  def get_json_public(profile: DatastoreProfile) -> str:
108
98
  return DatastoreProfile2Json._to_json(
109
- {k: v for k, v in profile.dict().items() if not isinstance(v, dict)}
99
+ {
100
+ k: v
101
+ for k, v in profile.dict().items()
102
+ if not str(k) in profile._private_attributes
103
+ }
110
104
  )
111
105
 
112
106
  @staticmethod
113
107
  def get_json_private(profile: DatastoreProfile) -> str:
114
108
  return DatastoreProfile2Json._to_json(
115
- {k: v for k, v in profile.dict().items() if isinstance(v, dict)}
109
+ {
110
+ k: v
111
+ for k, v in profile.dict().items()
112
+ if str(k) in profile._private_attributes
113
+ }
116
114
  )
117
115
 
118
116
  @staticmethod
@@ -123,9 +121,18 @@ class DatastoreProfile2Json(pydantic.BaseModel):
123
121
  decoded_dict = {
124
122
  k: base64.b64decode(str(v).encode()).decode() for k, v in attributes.items()
125
123
  }
124
+
125
+ def safe_literal_eval(value):
126
+ try:
127
+ return ast.literal_eval(value)
128
+ except (ValueError, SyntaxError):
129
+ return value
130
+
131
+ decoded_dict = {k: safe_literal_eval(v) for k, v in decoded_dict.items()}
126
132
  datastore_type = decoded_dict.get("type")
127
- if datastore_type == "redis":
128
- return DatastoreProfileRedis.parse_obj(decoded_dict)
133
+ ds_profile_factory = {"redis": DatastoreProfileRedis}
134
+ if datastore_type in ds_profile_factory:
135
+ return ds_profile_factory[datastore_type].parse_obj(decoded_dict)
129
136
  else:
130
137
  if datastore_type:
131
138
  reason = f"unexpected type '{decoded_dict['type']}'"
@@ -148,6 +155,11 @@ def datastore_profile_read(url):
148
155
  public_profile = mlrun.db.get_run_db().get_datastore_profile(
149
156
  profile_name, project_name
150
157
  )
158
+ if not public_profile:
159
+ raise mlrun.errors.MLRunInvalidArgumentError(
160
+ f"Failed to fetch datastore profile '{url}' "
161
+ )
162
+
151
163
  project_ds_name_private = DatastoreProfile.generate_secret_key(
152
164
  profile_name, project_name
153
165
  )
@@ -82,9 +82,10 @@ class DatabricksFileSystemDisableCache(DatabricksFileSystem):
82
82
  # dbfs objects will be represented with the following URL: dbfs://<path>
83
83
  class DBFSStore(DataStore):
84
84
  def __init__(self, parent, schema, name, endpoint="", secrets: dict = None):
85
- if not endpoint:
86
- endpoint = mlrun.get_secret_or_env("DATABRICKS_HOST")
87
85
  super().__init__(parent, name, schema, endpoint, secrets=secrets)
86
+ if not endpoint:
87
+ endpoint = self._get_secret_or_env("DATABRICKS_HOST")
88
+ self.endpoint = endpoint
88
89
  self.get_filesystem(silent=False)
89
90
 
90
91
  def get_filesystem(self, silent=True):
@@ -96,7 +97,7 @@ class DBFSStore(DataStore):
96
97
  def get_storage_options(self):
97
98
  return dict(
98
99
  token=self._get_secret_or_env("DATABRICKS_TOKEN"),
99
- instance=mlrun.get_secret_or_env("DATABRICKS_HOST"),
100
+ instance=self._get_secret_or_env("DATABRICKS_HOST"),
100
101
  )
101
102
 
102
103
  def _verify_filesystem_and_key(self, key: str):
@@ -111,12 +112,12 @@ class DBFSStore(DataStore):
111
112
 
112
113
  def get(self, key: str, size=None, offset=0) -> bytes:
113
114
  self._verify_filesystem_and_key(key)
114
- if size is not None and size <= 0:
115
- raise mlrun.errors.MLRunInvalidArgumentError(
116
- "size cannot be negative or zero"
117
- )
115
+ if size is not None and size < 0:
116
+ raise mlrun.errors.MLRunInvalidArgumentError("size cannot be negative")
117
+ if offset is None:
118
+ raise mlrun.errors.MLRunInvalidArgumentError("offset cannot be None")
118
119
  start = offset or None
119
- end = offset + size if size is not None else None
120
+ end = offset + size if size else None
120
121
  return self._filesystem.cat_file(key, start=start, end=end)
121
122
 
122
123
  def put(self, key, data, append=False):
mlrun/datastore/redis.py CHANGED
@@ -36,6 +36,12 @@ class RedisStore(DataStore):
36
36
  self.endpoint = self.endpoint or mlrun.mlconf.redis.url
37
37
  if schema == "ds":
38
38
  datastore_profile = datastore_profile_read(name)
39
+ if not datastore_profile:
40
+ raise ValueError(f"Failed to load datastore profile '{name}'")
41
+ if datastore_profile.type != "redis":
42
+ raise ValueError(
43
+ f"Trying to use profile of type '{datastore_profile.type}' as redis datastore"
44
+ )
39
45
  self._redis_url = datastore_profile.url_with_credentials()
40
46
  self.secure = datastore_profile.is_secured()
41
47
  else:
@@ -600,7 +600,12 @@ class BaseStoreTarget(DataTargetBase):
600
600
 
601
601
  def get_target_path(self):
602
602
  path_object = self._target_path_object
603
- return path_object.get_absolute_path() if path_object else None
603
+ project_name = self._resource.metadata.project if self._resource else None
604
+ return (
605
+ path_object.get_absolute_path(project_name=project_name)
606
+ if path_object
607
+ else None
608
+ )
604
609
 
605
610
  def get_target_path_with_credentials(self):
606
611
  return self.get_target_path()
@@ -1174,6 +1179,12 @@ class RedisNoSqlTarget(NoSqlBaseTarget):
1174
1179
  endpoint = endpoint or mlrun.mlconf.redis.url
1175
1180
  if endpoint.startswith("ds"):
1176
1181
  datastore_profile = datastore_profile_read(endpoint)
1182
+ if not datastore_profile:
1183
+ raise ValueError(f"Failed to load datastore profile '{endpoint}'")
1184
+ if datastore_profile.type != "redis":
1185
+ raise ValueError(
1186
+ f"Trying to use profile of type '{datastore_profile.type}' as redis datastore"
1187
+ )
1177
1188
  endpoint = datastore_profile.url_with_credentials()
1178
1189
  else:
1179
1190
  parsed_endpoint = urlparse(endpoint)
mlrun/db/base.py CHANGED
@@ -649,7 +649,7 @@ class RunDBInterface(ABC):
649
649
  ) -> mlrun.common.schemas.DatastoreProfile:
650
650
  pass
651
651
 
652
- def list_datastore_profile(
652
+ def list_datastore_profiles(
653
653
  self, project: str
654
654
  ) -> List[mlrun.common.schemas.DatastoreProfile]:
655
655
  pass
mlrun/db/factory.py CHANGED
@@ -54,6 +54,9 @@ class RunDBFactory(
54
54
  self._run_db = self._rundb_container.nop(url)
55
55
 
56
56
  else:
57
+ # TODO: this practically makes the SQLRunDB a singleton, which mean that its session is shared, needs
58
+ # to be refreshed frequently and cannot be used concurrently.
59
+ # The SQLRunDB should always get its session from the FastAPI dependency injection.
57
60
  self._run_db = self._rundb_container.run_db(url)
58
61
 
59
62
  self._run_db.connect(secrets=secrets)
mlrun/db/httpdb.py CHANGED
@@ -13,7 +13,6 @@
13
13
  # limitations under the License.
14
14
  import enum
15
15
  import http
16
- import json
17
16
  import re
18
17
  import tempfile
19
18
  import time
@@ -111,7 +110,7 @@ class HTTPRunDB(RunDBInterface):
111
110
 
112
111
  def __init__(self, url):
113
112
  self.server_version = ""
114
- self._session = None
113
+ self.session = None
115
114
  self._wait_for_project_terminal_state_retry_interval = 3
116
115
  self._wait_for_background_task_terminal_state_retry_interval = 3
117
116
  self._wait_for_project_deletion_interval = 3
@@ -244,12 +243,12 @@ class HTTPRunDB(RunDBInterface):
244
243
  dict_[key] = dict_[key].value
245
244
 
246
245
  # if the method is POST, we need to update the session with the appropriate retry policy
247
- if not self._session or method == "POST":
246
+ if not self.session or method == "POST":
248
247
  retry_on_post = self._is_retry_on_post_allowed(method, path)
249
- self._session = self._init_session(retry_on_post)
248
+ self.session = self._init_session(retry_on_post)
250
249
 
251
250
  try:
252
- response = self._session.request(
251
+ response = self.session.request(
253
252
  method, url, timeout=timeout, verify=False, **kw
254
253
  )
255
254
  except requests.RequestException as exc:
@@ -440,6 +439,10 @@ class HTTPRunDB(RunDBInterface):
440
439
  server_cfg.get("model_endpoint_monitoring_store_type")
441
440
  or config.model_endpoint_monitoring.store_type
442
441
  )
442
+ config.model_endpoint_monitoring.endpoint_store_connection = (
443
+ server_cfg.get("model_endpoint_monitoring_endpoint_store_connection")
444
+ or config.model_endpoint_monitoring.endpoint_store_connection
445
+ )
443
446
  config.packagers = server_cfg.get("packagers") or config.packagers
444
447
  server_data_prefixes = server_cfg.get("feature_store_data_prefixes") or {}
445
448
  for prefix in ["default", "nosql", "redisnosql"]:
@@ -1673,7 +1676,6 @@ class HTTPRunDB(RunDBInterface):
1673
1676
  order,
1674
1677
  max_partitions=None,
1675
1678
  ):
1676
-
1677
1679
  partition_params = {
1678
1680
  "partition-by": partition_by,
1679
1681
  "rows-per-partition": rows_per_partition,
@@ -2186,7 +2188,6 @@ class HTTPRunDB(RunDBInterface):
2186
2188
  error_message = f"Failed listing projects, query: {params}"
2187
2189
  response = self.api_call("GET", "projects", error_message, params=params)
2188
2190
  if format_ == mlrun.common.schemas.ProjectsFormat.name_only:
2189
-
2190
2191
  # projects is just a list of strings
2191
2192
  return response.json()["projects"]
2192
2193
 
@@ -3299,11 +3300,11 @@ class HTTPRunDB(RunDBInterface):
3299
3300
  self, name: str, project: str
3300
3301
  ) -> Optional[mlrun.common.schemas.DatastoreProfile]:
3301
3302
  project = project or config.default_project
3302
- path = self._path_of("projects", project, "datastore_profiles") + f"/{name}"
3303
+ path = self._path_of("projects", project, "datastore-profiles") + f"/{name}"
3303
3304
 
3304
3305
  res = self.api_call(method="GET", path=path)
3305
- if res and res._content:
3306
- public_wrapper = json.loads(res._content)
3306
+ if res:
3307
+ public_wrapper = res.json()
3307
3308
  datastore = DatastoreProfile2Json.create_from_json(
3308
3309
  public_json=public_wrapper["body"]
3309
3310
  )
@@ -3313,7 +3314,7 @@ class HTTPRunDB(RunDBInterface):
3313
3314
  def delete_datastore_profile(self, name: str, project: str):
3314
3315
  pass
3315
3316
 
3316
- def list_datastore_profile(
3317
+ def list_datastore_profiles(
3317
3318
  self, project: str
3318
3319
  ) -> List[mlrun.common.schemas.DatastoreProfile]:
3319
3320
  pass
@@ -3326,9 +3327,9 @@ class HTTPRunDB(RunDBInterface):
3326
3327
  :returns: None
3327
3328
  """
3328
3329
  project = project or config.default_project
3329
- path = self._path_of("projects", project, "datastore_profiles")
3330
+ path = self._path_of("projects", project, "datastore-profiles")
3330
3331
 
3331
- self.api_call(method="PUT", path=path, body=json.dumps(profile.dict()))
3332
+ self.api_call(method="PUT", path=path, json=profile.dict())
3332
3333
 
3333
3334
 
3334
3335
  def _as_json(obj):
mlrun/db/nopdb.py CHANGED
@@ -505,7 +505,7 @@ class NopDB(RunDBInterface):
505
505
  def delete_datastore_profile(self, name: str, project: str):
506
506
  pass
507
507
 
508
- def list_datastore_profile(
508
+ def list_datastore_profiles(
509
509
  self, project: str
510
510
  ) -> List[mlrun.common.schemas.DatastoreProfile]:
511
511
  pass
@@ -970,7 +970,10 @@ def _ingest_with_spark(
970
970
  max_time = source.start_time
971
971
  for target in featureset.status.targets:
972
972
  featureset.status.update_last_written_for_target(
973
- target.get_path().get_absolute_path(), max_time
973
+ target.get_path().get_absolute_path(
974
+ project_name=featureset.metadata.project
975
+ ),
976
+ max_time,
974
977
  )
975
978
 
976
979
  _post_ingestion(mlrun_context, featureset, spark)
@@ -432,7 +432,9 @@ class FeatureSet(ModelObj):
432
432
  target = get_online_target(self, name)
433
433
 
434
434
  if target:
435
- return target.get_path().get_absolute_path()
435
+ return target.get_path().get_absolute_path(
436
+ project_name=self.metadata.project
437
+ )
436
438
 
437
439
  def set_targets(
438
440
  self,
@@ -140,6 +140,7 @@ def featureset_initializer(server):
140
140
  featureset,
141
141
  targets=targets,
142
142
  source=source,
143
+ context=context,
143
144
  )
144
145
  featureset.save()
145
146
  server.graph = graph
mlrun/kfpops.py CHANGED
@@ -731,15 +731,21 @@ def generate_kfp_dag_and_resolve_project(run, project=None):
731
731
  return dag, project, workflow["status"].get("message", "")
732
732
 
733
733
 
734
- def format_summary_from_kfp_run(kfp_run, project=None):
734
+ def format_summary_from_kfp_run(
735
+ kfp_run, project=None, run_db: "mlrun.db.RunDBInterface" = None
736
+ ):
735
737
  override_project = project if project and project != "*" else None
736
738
  dag, project, message = generate_kfp_dag_and_resolve_project(
737
739
  kfp_run, override_project
738
740
  )
739
741
  run_id = get_in(kfp_run, "run.id")
740
742
 
743
+ # run db parameter allows us to use the same db session for the whole flow and avoid session isolation issues
744
+ if not run_db:
745
+ run_db = mlrun.db.get_run_db()
746
+
741
747
  # enrich DAG with mlrun run info
742
- runs = mlrun.db.get_run_db().list_runs(project=project, labels=f"workflow={run_id}")
748
+ runs = run_db.list_runs(project=project, labels=f"workflow={run_id}")
743
749
 
744
750
  for run in runs:
745
751
  step = get_in(run, ["metadata", "labels", "mlrun/runner-pod"])
mlrun/launcher/base.py CHANGED
@@ -330,7 +330,7 @@ class BaseLauncher(abc.ABC):
330
330
 
331
331
  if run.spec.output_path:
332
332
  run.spec.output_path = run.spec.output_path.replace("{{run.uid}}", meta.uid)
333
- run.spec.output_path = mlrun.utils.helpers.fill_artifact_path_template(
333
+ run.spec.output_path = mlrun.utils.helpers.fill_project_path_template(
334
334
  run.spec.output_path, run.metadata.project
335
335
  )
336
336
 
mlrun/model.py CHANGED
@@ -32,6 +32,7 @@ import mlrun.common.schemas.notification
32
32
  from .utils import (
33
33
  dict_to_json,
34
34
  dict_to_yaml,
35
+ fill_project_path_template,
35
36
  get_artifact_target,
36
37
  is_legacy_artifact,
37
38
  logger,
@@ -1580,11 +1581,12 @@ class TargetPathObject:
1580
1581
  def get_templated_path(self):
1581
1582
  return self.full_path_template
1582
1583
 
1583
- def get_absolute_path(self):
1584
- if self.run_id:
1585
- return self.full_path_template.format(run_id=self.run_id)
1586
- else:
1587
- return self.full_path_template
1584
+ def get_absolute_path(self, project_name=None):
1585
+ path = fill_project_path_template(
1586
+ artifact_path=self.full_path_template,
1587
+ project=project_name,
1588
+ )
1589
+ return path.format(run_id=self.run_id) if self.run_id else path
1588
1590
 
1589
1591
 
1590
1592
  class DataSource(ModelObj):
@@ -375,7 +375,7 @@ def get_db_function(project, key) -> mlrun.runtimes.BaseRuntime:
375
375
 
376
376
 
377
377
  def enrich_function_object(
378
- project, function, decorator=None, copy_function=True
378
+ project, function, decorator=None, copy_function=True, try_auto_mount=True
379
379
  ) -> mlrun.runtimes.BaseRuntime:
380
380
  if hasattr(function, "_enriched"):
381
381
  return function
@@ -409,11 +409,12 @@ def enrich_function_object(
409
409
  if decorator:
410
410
  decorator(f)
411
411
 
412
- if (
413
- decorator and AutoMountType.is_auto_modifier(decorator)
414
- ) or project.spec.disable_auto_mount:
415
- f.spec.disable_auto_mount = True
416
- f.try_auto_mount_based_on_config()
412
+ if try_auto_mount:
413
+ if (
414
+ decorator and AutoMountType.is_auto_modifier(decorator)
415
+ ) or project.spec.disable_auto_mount:
416
+ f.spec.disable_auto_mount = True
417
+ f.try_auto_mount_based_on_config()
417
418
 
418
419
  return f
419
420
 
mlrun/projects/project.py CHANGED
@@ -1356,7 +1356,7 @@ class MlrunProject(ModelObj):
1356
1356
  def register_artifacts(self):
1357
1357
  """register the artifacts in the MLRun DB (under this project)"""
1358
1358
  artifact_manager = self._get_artifact_manager()
1359
- artifact_path = mlrun.utils.helpers.fill_artifact_path_template(
1359
+ artifact_path = mlrun.utils.helpers.fill_project_path_template(
1360
1360
  self.spec.artifact_path or mlrun.mlconf.artifact_path, self.metadata.name
1361
1361
  )
1362
1362
  # TODO: To correctly maintain the list of artifacts from an exported project,
@@ -1476,7 +1476,7 @@ class MlrunProject(ModelObj):
1476
1476
  artifact_path = extend_artifact_path(
1477
1477
  artifact_path, self.spec.artifact_path or mlrun.mlconf.artifact_path
1478
1478
  )
1479
- artifact_path = mlrun.utils.helpers.fill_artifact_path_template(
1479
+ artifact_path = mlrun.utils.helpers.fill_project_path_template(
1480
1480
  artifact_path, self.metadata.name
1481
1481
  )
1482
1482
  producer = ArtifactProducer(
mlrun/run.py CHANGED
@@ -420,7 +420,7 @@ def get_or_create_ctx(
420
420
  if not newspec:
421
421
  newspec = {}
422
422
  if upload_artifacts:
423
- artifact_path = mlrun.utils.helpers.fill_artifact_path_template(
423
+ artifact_path = mlrun.utils.helpers.fill_project_path_template(
424
424
  mlconf.artifact_path, project or mlconf.default_project
425
425
  )
426
426
  update_in(newspec, ["spec", run_keys.output_path], artifact_path)
@@ -120,6 +120,7 @@ class RuntimeKinds(object):
120
120
  RuntimeKinds.spark,
121
121
  RuntimeKinds.remotespark,
122
122
  RuntimeKinds.mpijob,
123
+ RuntimeKinds.databricks,
123
124
  ]
124
125
 
125
126
  @staticmethod
mlrun/utils/helpers.py CHANGED
@@ -1269,7 +1269,7 @@ def calculate_dataframe_hash(dataframe: pandas.DataFrame):
1269
1269
  return hashlib.sha1(pandas.util.hash_pandas_object(dataframe).values).hexdigest()
1270
1270
 
1271
1271
 
1272
- def fill_artifact_path_template(artifact_path, project):
1272
+ def fill_project_path_template(artifact_path, project):
1273
1273
  # Supporting {{project}} is new, in certain setup configuration the default artifact path has the old
1274
1274
  # {{run.project}} so we're supporting it too for backwards compatibility
1275
1275
  if artifact_path and (
@@ -41,6 +41,7 @@ class WebhookNotification(NotificationBase):
41
41
  method = self.params.get("method", "post").lower()
42
42
  headers = self.params.get("headers", {})
43
43
  override_body = self.params.get("override_body", None)
44
+ verify_ssl = self.params.get("verify_ssl", None)
44
45
 
45
46
  request_body = {
46
47
  "message": message,
@@ -54,8 +55,15 @@ class WebhookNotification(NotificationBase):
54
55
  if override_body:
55
56
  request_body = override_body
56
57
 
58
+ # Specify the `verify_ssl` parameter value only for HTTPS urls.
59
+ # The `ClientSession` allows using `ssl=None` for the default SSL check,
60
+ # and `ssl=False` to skip SSL certificate validation.
61
+ # We maintain the default as `None`, so if the user sets `verify_ssl=True`,
62
+ # we automatically handle it as `ssl=None` for their convenience.
63
+ verify_ssl = verify_ssl and None if url.startswith("https") else None
64
+
57
65
  async with aiohttp.ClientSession() as session:
58
66
  response = await getattr(session, method)(
59
- url, headers=headers, json=request_body
67
+ url, headers=headers, json=request_body, ssl=verify_ssl
60
68
  )
61
69
  response.raise_for_status()
@@ -1,4 +1,4 @@
1
1
  {
2
- "git_commit": "e73644c35212e29056e0a9f60bd0e422437cb2dc",
3
- "version": "1.5.0-rc4"
2
+ "git_commit": "be0afaed9da28226cde7a541f8099ac40eb1b9a3",
3
+ "version": "1.5.0-rc6"
4
4
  }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: mlrun
3
- Version: 1.5.0rc4
3
+ Version: 1.5.0rc6
4
4
  Summary: Tracking and config of machine learning runs
5
5
  Home-page: https://github.com/mlrun/mlrun
6
6
  Author: Yaron Haviv
@@ -29,7 +29,7 @@ Requires-Dist: aiohttp-retry ~=2.8
29
29
  Requires-Dist: click ~=8.0.0
30
30
  Requires-Dist: kfp <1.8.14,~=1.8.0
31
31
  Requires-Dist: nest-asyncio ~=1.0
32
- Requires-Dist: ipython <9.0,>=7.0
32
+ Requires-Dist: ipython ~=8.0
33
33
  Requires-Dist: nuclio-jupyter ~=0.9.11
34
34
  Requires-Dist: numpy <1.23.0,>=1.16.5
35
35
  Requires-Dist: pandas <1.5.0,~=1.2
@@ -52,7 +52,6 @@ Requires-Dist: fsspec <2023.7,>=2023.1
52
52
  Requires-Dist: v3iofs ~=0.1.15
53
53
  Requires-Dist: storey ~=1.5.3
54
54
  Requires-Dist: deepdiff ~=5.0
55
- Requires-Dist: pymysql ~=1.0
56
55
  Requires-Dist: inflection ~=0.5.0
57
56
  Requires-Dist: python-dotenv ~=0.17.0
58
57
  Requires-Dist: setuptools ~=65.5
@@ -84,13 +83,14 @@ Requires-Dist: s3fs <2023.7,>=2023.1 ; extra == 'all'
84
83
  Provides-Extra: api
85
84
  Requires-Dist: uvicorn ~=0.20.0 ; extra == 'api'
86
85
  Requires-Dist: dask-kubernetes ~=0.11.0 ; extra == 'api'
87
- Requires-Dist: apscheduler ~=3.6 ; extra == 'api'
86
+ Requires-Dist: apscheduler !=3.10.2,~=3.6 ; extra == 'api'
88
87
  Requires-Dist: sqlite3-to-mysql ~=1.4 ; extra == 'api'
89
88
  Requires-Dist: objgraph ~=3.5 ; extra == 'api'
90
89
  Requires-Dist: igz-mgmt ~=0.0.10 ; extra == 'api'
91
90
  Requires-Dist: humanfriendly ~=9.2 ; extra == 'api'
92
91
  Requires-Dist: fastapi ~=0.95.2 ; extra == 'api'
93
92
  Requires-Dist: sqlalchemy ~=1.4 ; extra == 'api'
93
+ Requires-Dist: pymysql ~=1.0 ; extra == 'api'
94
94
  Provides-Extra: azure-blob-storage
95
95
  Requires-Dist: msrest ~=0.6.21 ; extra == 'azure-blob-storage'
96
96
  Requires-Dist: azure-core ~=1.24 ; extra == 'azure-blob-storage'
@@ -125,7 +125,7 @@ Requires-Dist: s3fs <2023.7,>=2023.1 ; extra == 'complete'
125
125
  Provides-Extra: complete-api
126
126
  Requires-Dist: adlfs <2023.5,>=2022.2 ; extra == 'complete-api'
127
127
  Requires-Dist: aiobotocore <2.6,>=2.4.2 ; extra == 'complete-api'
128
- Requires-Dist: apscheduler ~=3.6 ; extra == 'complete-api'
128
+ Requires-Dist: apscheduler !=3.10.2,~=3.6 ; extra == 'complete-api'
129
129
  Requires-Dist: avro ~=1.11 ; extra == 'complete-api'
130
130
  Requires-Dist: azure-core ~=1.24 ; extra == 'complete-api'
131
131
  Requires-Dist: azure-identity ~=1.5 ; extra == 'complete-api'
@@ -144,6 +144,7 @@ Requires-Dist: kafka-python ~=2.0 ; extra == 'complete-api'
144
144
  Requires-Dist: msrest ~=0.6.21 ; extra == 'complete-api'
145
145
  Requires-Dist: objgraph ~=3.5 ; extra == 'complete-api'
146
146
  Requires-Dist: plotly <5.12.0,~=5.4 ; extra == 'complete-api'
147
+ Requires-Dist: pymysql ~=1.0 ; extra == 'complete-api'
147
148
  Requires-Dist: pyopenssl >=23 ; extra == 'complete-api'
148
149
  Requires-Dist: redis ~=4.3 ; extra == 'complete-api'
149
150
  Requires-Dist: s3fs <2023.7,>=2023.1 ; extra == 'complete-api'