mlrun 1.6.2rc5__py3-none-any.whl → 1.6.2rc6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of mlrun might be problematic. Click here for more details.

Files changed (48) hide show
  1. mlrun/common/db/sql_session.py +0 -3
  2. mlrun/common/model_monitoring/helpers.py +2 -4
  3. mlrun/common/schemas/__init__.py +0 -1
  4. mlrun/common/schemas/project.py +0 -2
  5. mlrun/config.py +11 -30
  6. mlrun/datastore/azure_blob.py +9 -9
  7. mlrun/datastore/base.py +44 -22
  8. mlrun/datastore/google_cloud_storage.py +6 -6
  9. mlrun/datastore/v3io.py +46 -70
  10. mlrun/db/base.py +0 -18
  11. mlrun/db/httpdb.py +25 -28
  12. mlrun/execution.py +3 -3
  13. mlrun/frameworks/tf_keras/callbacks/logging_callback.py +3 -3
  14. mlrun/frameworks/tf_keras/model_handler.py +7 -7
  15. mlrun/k8s_utils.py +5 -10
  16. mlrun/kfpops.py +10 -19
  17. mlrun/model.py +0 -5
  18. mlrun/model_monitoring/api.py +8 -8
  19. mlrun/model_monitoring/batch.py +1 -1
  20. mlrun/model_monitoring/stores/kv_model_endpoint_store.py +13 -13
  21. mlrun/model_monitoring/stores/sql_model_endpoint_store.py +1 -0
  22. mlrun/package/packagers/pandas_packagers.py +3 -3
  23. mlrun/package/utils/_archiver.py +1 -3
  24. mlrun/platforms/iguazio.py +65 -6
  25. mlrun/projects/pipelines.py +11 -21
  26. mlrun/projects/project.py +46 -65
  27. mlrun/runtimes/base.py +1 -24
  28. mlrun/runtimes/function.py +9 -9
  29. mlrun/runtimes/kubejob.py +3 -5
  30. mlrun/runtimes/local.py +2 -2
  31. mlrun/runtimes/mpijob/abstract.py +6 -6
  32. mlrun/runtimes/pod.py +3 -3
  33. mlrun/runtimes/serving.py +3 -3
  34. mlrun/runtimes/sparkjob/spark3job.py +3 -3
  35. mlrun/serving/remote.py +2 -4
  36. mlrun/utils/async_http.py +3 -3
  37. mlrun/utils/helpers.py +0 -8
  38. mlrun/utils/http.py +3 -3
  39. mlrun/utils/logger.py +2 -2
  40. mlrun/utils/notifications/notification_pusher.py +6 -6
  41. mlrun/utils/version/version.json +2 -2
  42. {mlrun-1.6.2rc5.dist-info → mlrun-1.6.2rc6.dist-info}/METADATA +16 -14
  43. {mlrun-1.6.2rc5.dist-info → mlrun-1.6.2rc6.dist-info}/RECORD +47 -48
  44. mlrun/common/schemas/common.py +0 -40
  45. {mlrun-1.6.2rc5.dist-info → mlrun-1.6.2rc6.dist-info}/LICENSE +0 -0
  46. {mlrun-1.6.2rc5.dist-info → mlrun-1.6.2rc6.dist-info}/WHEEL +0 -0
  47. {mlrun-1.6.2rc5.dist-info → mlrun-1.6.2rc6.dist-info}/entry_points.txt +0 -0
  48. {mlrun-1.6.2rc5.dist-info → mlrun-1.6.2rc6.dist-info}/top_level.txt +0 -0
mlrun/projects/project.py CHANGED
@@ -24,7 +24,7 @@ import typing
24
24
  import uuid
25
25
  import warnings
26
26
  import zipfile
27
- from os import environ, makedirs, path
27
+ from os import environ, makedirs, path, remove
28
28
  from typing import Callable, Dict, List, Optional, Union
29
29
 
30
30
  import dotenv
@@ -605,14 +605,9 @@ def _load_project_dir(context, name="", subpath=""):
605
605
  # If there is a setup script do not force having project.yaml file
606
606
  project = MlrunProject()
607
607
  else:
608
- message = "Project or function YAML not found in path"
609
- logger.error(
610
- message,
611
- context=context,
612
- name=name,
613
- subpath=subpath,
608
+ raise mlrun.errors.MLRunNotFoundError(
609
+ "project or function YAML not found in path"
614
610
  )
615
- raise mlrun.errors.MLRunNotFoundError(message)
616
611
 
617
612
  project.spec.context = context
618
613
  project.metadata.name = name or project.metadata.name
@@ -1240,20 +1235,20 @@ class MlrunProject(ModelObj):
1240
1235
  self,
1241
1236
  name,
1242
1237
  workflow_path: str,
1243
- embed: bool = False,
1244
- engine: Optional[str] = None,
1245
- args_schema: list[EntrypointParam] = None,
1246
- handler: Optional[str] = None,
1238
+ embed=False,
1239
+ engine=None,
1240
+ args_schema: typing.List[EntrypointParam] = None,
1241
+ handler=None,
1247
1242
  schedule: typing.Union[str, mlrun.common.schemas.ScheduleCronTrigger] = None,
1248
- ttl: Optional[int] = None,
1249
- image: Optional[str] = None,
1243
+ ttl=None,
1244
+ image: str = None,
1250
1245
  **args,
1251
1246
  ):
1252
1247
  """Add or update a workflow, specify a name and the code path
1253
1248
 
1254
1249
  :param name: Name of the workflow
1255
1250
  :param workflow_path: URL (remote) / Path (absolute or relative to the project code path i.e.
1256
- <project.spec.get_code_path()>/<workflow_path>) for the workflow file.
1251
+ <project.spec.get_code_path()>/<workflow_path>) for the workflow file.
1257
1252
  :param embed: Add the workflow code into the project.yaml
1258
1253
  :param engine: Workflow processing engine ("kfp", "local", "remote" or "remote:local")
1259
1254
  :param args_schema: List of arg schema definitions (:py:class`~mlrun.model.EntrypointParam`)
@@ -2600,45 +2595,40 @@ class MlrunProject(ModelObj):
2600
2595
  cleanup_ttl: int = None,
2601
2596
  notifications: typing.List[mlrun.model.Notification] = None,
2602
2597
  ) -> _PipelineRunStatus:
2603
- """Run a workflow using kubeflow pipelines
2598
+ """run a workflow using kubeflow pipelines
2604
2599
 
2605
- :param name: Name of the workflow
2600
+ :param name: name of the workflow
2606
2601
  :param workflow_path:
2607
- URL to a workflow file, if not a project workflow
2602
+ url to a workflow file, if not a project workflow
2608
2603
  :param arguments:
2609
- Kubeflow pipelines arguments (parameters)
2604
+ kubeflow pipelines arguments (parameters)
2610
2605
  :param artifact_path:
2611
- Target path/url for workflow artifacts, the string
2606
+ target path/url for workflow artifacts, the string
2612
2607
  '{{workflow.uid}}' will be replaced by workflow id
2613
2608
  :param workflow_handler:
2614
- Workflow function handler (for running workflow function directly)
2615
- :param namespace: Kubernetes namespace if other than default
2616
- :param sync: Force functions sync before run
2617
- :param watch: Wait for pipeline completion
2618
- :param dirty: Allow running the workflow when the git repo is dirty
2619
- :param engine: Workflow engine running the workflow.
2620
- Supported values are 'kfp' (default), 'local' or 'remote'.
2621
- For setting engine for remote running use 'remote:local' or 'remote:kfp'.
2622
- :param local: Run local pipeline with local functions (set local=True in function.run())
2609
+ workflow function handler (for running workflow function directly)
2610
+ :param namespace: kubernetes namespace if other than default
2611
+ :param sync: force functions sync before run
2612
+ :param watch: wait for pipeline completion
2613
+ :param dirty: allow running the workflow when the git repo is dirty
2614
+ :param engine: workflow engine running the workflow.
2615
+ supported values are 'kfp' (default), 'local' or 'remote'.
2616
+ for setting engine for remote running use 'remote:local' or 'remote:kfp'.
2617
+ :param local: run local pipeline with local functions (set local=True in function.run())
2623
2618
  :param schedule: ScheduleCronTrigger class instance or a standard crontab expression string
2624
2619
  (which will be converted to the class using its `from_crontab` constructor),
2625
2620
  see this link for help:
2626
2621
  https://apscheduler.readthedocs.io/en/3.x/modules/triggers/cron.html#module-apscheduler.triggers.cron
2627
2622
  for using the pre-defined workflow's schedule, set `schedule=True`
2628
- :param timeout: Timeout in seconds to wait for pipeline completion (watch will be activated)
2629
- :param source: Source to use instead of the actual `project.spec.source` (used when engine is remote).
2630
- Can be a one of:
2631
- 1. Remote URL which is loaded dynamically to the workflow runner.
2632
- 2. A path to the project's context on the workflow runner's image.
2633
- Path can be absolute or relative to `project.spec.build.source_code_target_dir` if defined
2634
- (enriched when building a project image with source, see `MlrunProject.build_image`).
2635
- For other engines the source is used to validate that the code is up-to-date.
2623
+ :param timeout: timeout in seconds to wait for pipeline completion (watch will be activated)
2624
+ :param source: remote source to use instead of the actual `project.spec.source` (used when engine is remote).
2625
+ for other engines the source is to validate that the code is up-to-date
2636
2626
  :param cleanup_ttl:
2637
- Pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
2638
- Workflow and all its resources are deleted)
2627
+ pipeline cleanup ttl in secs (time to wait after workflow completion, at which point the
2628
+ workflow and all its resources are deleted)
2639
2629
  :param notifications:
2640
- List of notifications to send for workflow completion
2641
- :returns: Run id
2630
+ list of notifications to send for workflow completion
2631
+ :returns: run id
2642
2632
  """
2643
2633
 
2644
2634
  arguments = arguments or {}
@@ -2785,7 +2775,7 @@ class MlrunProject(ModelObj):
2785
2775
  def export(self, filepath=None, include_files: str = None):
2786
2776
  """save the project object into a yaml file or zip archive (default to project.yaml)
2787
2777
 
2788
- By default, the project object is exported to a yaml file, when the filepath suffix is '.zip'
2778
+ By default the project object is exported to a yaml file, when the filepath suffix is '.zip'
2789
2779
  the project context dir (code files) are also copied into the zip, the archive path can include
2790
2780
  DataItem urls (for remote object storage, e.g. s3://<bucket>/<path>).
2791
2781
 
@@ -2810,19 +2800,19 @@ class MlrunProject(ModelObj):
2810
2800
 
2811
2801
  if archive_code:
2812
2802
  files_filter = include_files or "**"
2813
- with tempfile.NamedTemporaryFile(suffix=".zip") as f:
2814
- remote_file = "://" in filepath
2815
- fpath = f.name if remote_file else filepath
2816
- with zipfile.ZipFile(fpath, "w") as zipf:
2817
- for file_path in glob.iglob(
2818
- f"{project_dir}/{files_filter}", recursive=True
2819
- ):
2820
- write_path = pathlib.Path(file_path)
2821
- zipf.write(
2822
- write_path, arcname=write_path.relative_to(project_dir)
2823
- )
2824
- if remote_file:
2825
- mlrun.get_dataitem(filepath).upload(zipf.filename)
2803
+ tmp_path = None
2804
+ if "://" in filepath:
2805
+ tmp_path = tempfile.mktemp(".zip")
2806
+ zipf = zipfile.ZipFile(tmp_path or filepath, "w")
2807
+ for file_path in glob.iglob(
2808
+ f"{project_dir}/{files_filter}", recursive=True
2809
+ ):
2810
+ write_path = pathlib.Path(file_path)
2811
+ zipf.write(write_path, arcname=write_path.relative_to(project_dir))
2812
+ zipf.close()
2813
+ if tmp_path:
2814
+ mlrun.get_dataitem(filepath).upload(tmp_path)
2815
+ remove(tmp_path)
2826
2816
 
2827
2817
  def set_model_monitoring_credentials(
2828
2818
  self,
@@ -3037,7 +3027,6 @@ class MlrunProject(ModelObj):
3037
3027
  requirements_file: str = None,
3038
3028
  builder_env: dict = None,
3039
3029
  extra_args: str = None,
3040
- source_code_target_dir: str = None,
3041
3030
  ):
3042
3031
  """specify builder configuration for the project
3043
3032
 
@@ -3058,8 +3047,6 @@ class MlrunProject(ModelObj):
3058
3047
  e.g. builder_env={"GIT_TOKEN": token}, does not work yet in KFP
3059
3048
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
3060
3049
  e.g. extra_args="--skip-tls-verify --build-arg A=val"
3061
- :param source_code_target_dir: Path on the image where source code would be extracted
3062
- (by default `/home/mlrun_code`)
3063
3050
  """
3064
3051
  if not overwrite_build_params:
3065
3052
  # TODO: change overwrite_build_params default to True in 1.8.0
@@ -3083,7 +3070,6 @@ class MlrunProject(ModelObj):
3083
3070
  overwrite=overwrite_build_params,
3084
3071
  builder_env=builder_env,
3085
3072
  extra_args=extra_args,
3086
- source_code_target_dir=source_code_target_dir,
3087
3073
  )
3088
3074
 
3089
3075
  if set_as_default and image != self.default_image:
@@ -3130,7 +3116,7 @@ class MlrunProject(ModelObj):
3130
3116
  * False: The new params are merged with the existing
3131
3117
  * True: The existing params are replaced by the new ones
3132
3118
  :param extra_args: A string containing additional builder arguments in the format of command-line options,
3133
- e.g. extra_args="--skip-tls-verify --build-arg A=val"
3119
+ e.g. extra_args="--skip-tls-verify --build-arg A=val"
3134
3120
  :param target_dir: Path on the image where source code would be extracted (by default `/home/mlrun_code`)
3135
3121
  """
3136
3122
  if not base_image:
@@ -3198,11 +3184,6 @@ class MlrunProject(ModelObj):
3198
3184
  force_build=True,
3199
3185
  )
3200
3186
 
3201
- # Get the enriched target dir from the function
3202
- self.spec.build.source_code_target_dir = (
3203
- function.spec.build.source_code_target_dir
3204
- )
3205
-
3206
3187
  try:
3207
3188
  mlrun.db.get_run_db(secrets=self._secrets).delete_function(
3208
3189
  name=function.metadata.name
mlrun/runtimes/base.py CHANGED
@@ -15,7 +15,6 @@ import enum
15
15
  import http
16
16
  import re
17
17
  import typing
18
- import warnings
19
18
  from base64 import b64encode
20
19
  from os import environ
21
20
  from typing import Callable, Dict, List, Optional, Union
@@ -125,7 +124,7 @@ class FunctionSpec(ModelObj):
125
124
  self.allow_empty_resources = None
126
125
  # the build.source is cloned/extracted to the specified clone_target_dir
127
126
  # if a relative path is specified, it will be enriched with a temp dir path
128
- self._clone_target_dir = clone_target_dir or None
127
+ self.clone_target_dir = clone_target_dir or ""
129
128
 
130
129
  @property
131
130
  def build(self) -> ImageBuilder:
@@ -135,28 +134,6 @@ class FunctionSpec(ModelObj):
135
134
  def build(self, build):
136
135
  self._build = self._verify_dict(build, "build", ImageBuilder)
137
136
 
138
- @property
139
- def clone_target_dir(self):
140
- # TODO: remove this property in 1.9.0
141
- if self.build.source_code_target_dir:
142
- warnings.warn(
143
- "The clone_target_dir attribute is deprecated in 1.6.2 and will be removed in 1.9.0. "
144
- "Use spec.build.source_code_target_dir instead.",
145
- FutureWarning,
146
- )
147
- return self.build.source_code_target_dir
148
-
149
- @clone_target_dir.setter
150
- def clone_target_dir(self, clone_target_dir):
151
- # TODO: remove this property in 1.9.0
152
- if clone_target_dir:
153
- warnings.warn(
154
- "The clone_target_dir attribute is deprecated in 1.6.2 and will be removed in 1.9.0. "
155
- "Use spec.build.source_code_target_dir instead.",
156
- FutureWarning,
157
- )
158
- self.build.source_code_target_dir = clone_target_dir
159
-
160
137
  def enrich_function_preemption_spec(self):
161
138
  pass
162
139
 
@@ -432,15 +432,15 @@ class RemoteRuntime(KubeResource):
432
432
  raise ValueError(
433
433
  "gateway timeout must be greater than the worker timeout"
434
434
  )
435
- annotations["nginx.ingress.kubernetes.io/proxy-connect-timeout"] = (
436
- f"{gateway_timeout}"
437
- )
438
- annotations["nginx.ingress.kubernetes.io/proxy-read-timeout"] = (
439
- f"{gateway_timeout}"
440
- )
441
- annotations["nginx.ingress.kubernetes.io/proxy-send-timeout"] = (
442
- f"{gateway_timeout}"
443
- )
435
+ annotations[
436
+ "nginx.ingress.kubernetes.io/proxy-connect-timeout"
437
+ ] = f"{gateway_timeout}"
438
+ annotations[
439
+ "nginx.ingress.kubernetes.io/proxy-read-timeout"
440
+ ] = f"{gateway_timeout}"
441
+ annotations[
442
+ "nginx.ingress.kubernetes.io/proxy-send-timeout"
443
+ ] = f"{gateway_timeout}"
444
444
 
445
445
  trigger = nuclio.HttpTrigger(
446
446
  workers=workers,
mlrun/runtimes/kubejob.py CHANGED
@@ -73,7 +73,7 @@ class KubejobRuntime(KubeResource):
73
73
  if workdir:
74
74
  self.spec.workdir = workdir
75
75
  if target_dir:
76
- self.spec.build.source_code_target_dir = target_dir
76
+ self.spec.clone_target_dir = target_dir
77
77
 
78
78
  self.spec.build.load_source_on_run = pull_at_runtime
79
79
  if (
@@ -232,10 +232,8 @@ class KubejobRuntime(KubeResource):
232
232
  self.spec.build.base_image = self.spec.build.base_image or get_in(
233
233
  data, "data.spec.build.base_image"
234
234
  )
235
- # Get the source target dir in case it was enriched due to loading source
236
- self.spec.build.source_code_target_dir = get_in(
237
- data, "data.spec.build.source_code_target_dir"
238
- ) or get_in(data, "data.spec.clone_target_dir")
235
+ # get the clone target dir in case it was enriched due to loading source
236
+ self.spec.clone_target_dir = get_in(data, "data.spec.clone_target_dir")
239
237
  ready = data.get("ready", False)
240
238
  if not ready:
241
239
  logger.info(
mlrun/runtimes/local.py CHANGED
@@ -218,7 +218,7 @@ class LocalRuntime(BaseRuntime, ParallelRunner):
218
218
  if workdir:
219
219
  self.spec.workdir = workdir
220
220
  if target_dir:
221
- self.spec.build.source_code_target_dir = target_dir
221
+ self.spec.clone_target_dir = target_dir
222
222
 
223
223
  def is_deployed(self):
224
224
  return True
@@ -240,7 +240,7 @@ class LocalRuntime(BaseRuntime, ParallelRunner):
240
240
  if self.spec.build.source and not hasattr(self, "_is_run_local"):
241
241
  target_dir = extract_source(
242
242
  self.spec.build.source,
243
- self.spec.build.source_code_target_dir,
243
+ self.spec.clone_target_dir,
244
244
  secrets=execution._secrets_manager,
245
245
  )
246
246
  if workdir and not workdir.startswith("/"):
@@ -196,13 +196,13 @@ class AbstractMPIJobRuntime(KubejobRuntime, abc.ABC):
196
196
  if steps_per_sample is not None:
197
197
  horovod_autotune_settings["autotune-steps-per-sample"] = steps_per_sample
198
198
  if bayes_opt_max_samples is not None:
199
- horovod_autotune_settings["autotune-bayes-opt-max-samples"] = (
200
- bayes_opt_max_samples
201
- )
199
+ horovod_autotune_settings[
200
+ "autotune-bayes-opt-max-samples"
201
+ ] = bayes_opt_max_samples
202
202
  if gaussian_process_noise is not None:
203
- horovod_autotune_settings["autotune-gaussian-process-noise"] = (
204
- gaussian_process_noise
205
- )
203
+ horovod_autotune_settings[
204
+ "autotune-gaussian-process-noise"
205
+ ] = gaussian_process_noise
206
206
 
207
207
  self.set_envs(horovod_autotune_settings)
208
208
 
mlrun/runtimes/pod.py CHANGED
@@ -430,9 +430,9 @@ class KubeResourceSpec(FunctionSpec):
430
430
  )
431
431
  is None
432
432
  ):
433
- resources[resource_requirement][resource_type] = (
434
- default_resources[resource_requirement][resource_type]
435
- )
433
+ resources[resource_requirement][
434
+ resource_type
435
+ ] = default_resources[resource_requirement][resource_type]
436
436
  # This enables the user to define that no defaults would be applied on the resources
437
437
  elif resources == {}:
438
438
  return resources
mlrun/runtimes/serving.py CHANGED
@@ -523,9 +523,9 @@ class ServingRuntime(RemoteRuntime):
523
523
  function_object.metadata.tag = self.metadata.tag
524
524
 
525
525
  function_object.metadata.labels = function_object.metadata.labels or {}
526
- function_object.metadata.labels["mlrun/parent-function"] = (
527
- self.metadata.name
528
- )
526
+ function_object.metadata.labels[
527
+ "mlrun/parent-function"
528
+ ] = self.metadata.name
529
529
  function_object._is_child_function = True
530
530
  if not function_object.spec.graph:
531
531
  # copy the current graph only if the child doesnt have a graph of his own
@@ -345,9 +345,9 @@ class Spark3JobSpec(KubeResourceSpec):
345
345
  )
346
346
  is None
347
347
  ):
348
- resources[resource_requirement][resource_type] = (
349
- default_resources[resource_requirement][resource_type]
350
- )
348
+ resources[resource_requirement][
349
+ resource_type
350
+ ] = default_resources[resource_requirement][resource_type]
351
351
  else:
352
352
  resources = default_resources
353
353
 
mlrun/serving/remote.py CHANGED
@@ -21,7 +21,6 @@ import storey
21
21
  from storey.flow import _ConcurrentJobExecution
22
22
 
23
23
  import mlrun
24
- import mlrun.config
25
24
  from mlrun.errors import err_to_str
26
25
  from mlrun.utils import logger
27
26
 
@@ -174,8 +173,7 @@ class RemoteStep(storey.SendToHttp):
174
173
  if not self._session:
175
174
  self._session = mlrun.utils.HTTPSessionWithRetry(
176
175
  self.retries,
177
- self.backoff_factor
178
- or mlrun.config.config.http_retry_defaults.backoff_factor,
176
+ self.backoff_factor or mlrun.mlconf.http_retry_defaults.backoff_factor,
179
177
  retry_on_exception=False,
180
178
  retry_on_status=self.retries > 0,
181
179
  retry_on_post=True,
@@ -187,7 +185,7 @@ class RemoteStep(storey.SendToHttp):
187
185
  resp = self._session.request(
188
186
  method,
189
187
  url,
190
- verify=mlrun.config.config.httpdb.http.verify,
188
+ verify=False,
191
189
  headers=headers,
192
190
  data=body,
193
191
  timeout=self.timeout,
mlrun/utils/async_http.py CHANGED
@@ -139,9 +139,9 @@ class _CustomRequestContext(_RequestContext):
139
139
 
140
140
  # enrich user agent
141
141
  # will help traceability and debugging
142
- headers[aiohttp.hdrs.USER_AGENT] = (
143
- f"{aiohttp.http.SERVER_SOFTWARE} mlrun/{config.version}"
144
- )
142
+ headers[
143
+ aiohttp.hdrs.USER_AGENT
144
+ ] = f"{aiohttp.http.SERVER_SOFTWARE} mlrun/{config.version}"
145
145
 
146
146
  response: typing.Optional[
147
147
  aiohttp.ClientResponse
mlrun/utils/helpers.py CHANGED
@@ -1622,11 +1622,3 @@ def get_local_file_schema() -> List:
1622
1622
  # The expression `list(string.ascii_lowercase)` generates a list of lowercase alphabets,
1623
1623
  # which corresponds to drive letters in Windows file paths such as `C:/Windows/path`.
1624
1624
  return ["file"] + list(string.ascii_lowercase)
1625
-
1626
-
1627
- def is_safe_path(base, filepath, is_symlink=False):
1628
- # Avoid path traversal attacks by ensuring that the path is safe
1629
- resolved_filepath = (
1630
- os.path.abspath(filepath) if not is_symlink else os.path.realpath(filepath)
1631
- )
1632
- return base == os.path.commonpath((base, resolved_filepath))
mlrun/utils/http.py CHANGED
@@ -110,9 +110,9 @@ class HTTPSessionWithRetry(requests.Session):
110
110
  def request(self, method, url, **kwargs):
111
111
  retry_count = 0
112
112
  kwargs.setdefault("headers", {})
113
- kwargs["headers"]["User-Agent"] = (
114
- f"{requests.utils.default_user_agent()} mlrun/{config.version}"
115
- )
113
+ kwargs["headers"][
114
+ "User-Agent"
115
+ ] = f"{requests.utils.default_user_agent()} mlrun/{config.version}"
116
116
  while True:
117
117
  try:
118
118
  response = super().request(method, url, **kwargs)
mlrun/utils/logger.py CHANGED
@@ -186,7 +186,7 @@ class FormatterKinds(Enum):
186
186
  JSON = "json"
187
187
 
188
188
 
189
- def create_formatter_instance(formatter_kind: FormatterKinds) -> logging.Formatter:
189
+ def _create_formatter_instance(formatter_kind: FormatterKinds) -> logging.Formatter:
190
190
  return {
191
191
  FormatterKinds.HUMAN: HumanReadableFormatter(),
192
192
  FormatterKinds.HUMAN_EXTENDED: HumanReadableExtendedFormatter(),
@@ -208,7 +208,7 @@ def create_logger(
208
208
  logger_instance = Logger(level, name=name, propagate=False)
209
209
 
210
210
  # resolve formatter
211
- formatter_instance = create_formatter_instance(
211
+ formatter_instance = _create_formatter_instance(
212
212
  FormatterKinds(formatter_kind.lower())
213
213
  )
214
214
 
@@ -307,9 +307,9 @@ class NotificationPusher(_NotificationPusherBase):
307
307
  traceback=traceback.format_exc(),
308
308
  )
309
309
  update_notification_status_kwargs["reason"] = f"Exception error: {str(exc)}"
310
- update_notification_status_kwargs["status"] = (
311
- mlrun.common.schemas.NotificationStatus.ERROR
312
- )
310
+ update_notification_status_kwargs[
311
+ "status"
312
+ ] = mlrun.common.schemas.NotificationStatus.ERROR
313
313
  raise exc
314
314
  finally:
315
315
  self._update_notification_status(
@@ -356,9 +356,9 @@ class NotificationPusher(_NotificationPusherBase):
356
356
  traceback=traceback.format_exc(),
357
357
  )
358
358
  update_notification_status_kwargs["reason"] = f"Exception error: {str(exc)}"
359
- update_notification_status_kwargs["status"] = (
360
- mlrun.common.schemas.NotificationStatus.ERROR
361
- )
359
+ update_notification_status_kwargs[
360
+ "status"
361
+ ] = mlrun.common.schemas.NotificationStatus.ERROR
362
362
  raise exc
363
363
  finally:
364
364
  await mlrun.utils.helpers.run_in_threadpool(
@@ -1,4 +1,4 @@
1
1
  {
2
- "git_commit": "467e492c609b314126e880f26a0c116253d4a48c",
3
- "version": "1.6.2-rc5"
2
+ "git_commit": "80404fadf38217c0390793770297cb046629b439",
3
+ "version": "1.6.2-rc6"
4
4
  }
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: mlrun
3
- Version: 1.6.2rc5
3
+ Version: 1.6.2rc6
4
4
  Summary: Tracking and config of machine learning runs
5
5
  Home-page: https://github.com/mlrun/mlrun
6
6
  Author: Yaron Haviv
@@ -36,7 +36,7 @@ Requires-Dist: pyarrow <15,>=10.0
36
36
  Requires-Dist: pyyaml ~=5.1
37
37
  Requires-Dist: requests ~=2.31
38
38
  Requires-Dist: tabulate ~=0.8.6
39
- Requires-Dist: v3io ~=0.6.2
39
+ Requires-Dist: v3io ~=0.5.21
40
40
  Requires-Dist: pydantic >=1.10.8,~=1.10
41
41
  Requires-Dist: mergedeep ~=1.3
42
42
  Requires-Dist: v3io-frames ~=0.10.12
@@ -44,10 +44,10 @@ Requires-Dist: semver ~=3.0
44
44
  Requires-Dist: dependency-injector ~=4.41
45
45
  Requires-Dist: fsspec ==2023.9.2
46
46
  Requires-Dist: v3iofs ~=0.1.17
47
- Requires-Dist: storey ~=1.6.19
47
+ Requires-Dist: storey ~=1.6.18
48
48
  Requires-Dist: inflection ~=0.5.0
49
49
  Requires-Dist: python-dotenv ~=0.17.0
50
- Requires-Dist: setuptools ~=69.1
50
+ Requires-Dist: setuptools ~=68.2
51
51
  Requires-Dist: deprecated ~=1.2
52
52
  Requires-Dist: jinja2 >=3.1.3,~=3.1
53
53
  Requires-Dist: anyio ~=3.7
@@ -80,11 +80,12 @@ Requires-Dist: sqlalchemy ~=1.4 ; extra == 'all'
80
80
  Provides-Extra: api
81
81
  Requires-Dist: uvicorn ~=0.27.1 ; extra == 'api'
82
82
  Requires-Dist: dask-kubernetes ~=0.11.0 ; extra == 'api'
83
- Requires-Dist: apscheduler <4,>=3.10.3 ; extra == 'api'
84
- Requires-Dist: objgraph ~=3.6 ; extra == 'api'
85
- Requires-Dist: igz-mgmt ~=0.1.0 ; extra == 'api'
86
- Requires-Dist: humanfriendly ~=10.0 ; extra == 'api'
87
- Requires-Dist: fastapi ~=0.110.0 ; extra == 'api'
83
+ Requires-Dist: apscheduler !=3.10.2,~=3.6 ; extra == 'api'
84
+ Requires-Dist: sqlite3-to-mysql ~=1.4 ; extra == 'api'
85
+ Requires-Dist: objgraph ~=3.5 ; extra == 'api'
86
+ Requires-Dist: igz-mgmt ~=0.0.10 ; extra == 'api'
87
+ Requires-Dist: humanfriendly ~=9.2 ; extra == 'api'
88
+ Requires-Dist: fastapi ~=0.103.2 ; extra == 'api'
88
89
  Requires-Dist: sqlalchemy ~=1.4 ; extra == 'api'
89
90
  Requires-Dist: pymysql ~=1.0 ; extra == 'api'
90
91
  Requires-Dist: alembic ~=1.9 ; extra == 'api'
@@ -126,7 +127,7 @@ Provides-Extra: complete-api
126
127
  Requires-Dist: adlfs ==2023.9.0 ; extra == 'complete-api'
127
128
  Requires-Dist: aiobotocore <2.8,>=2.5.0 ; extra == 'complete-api'
128
129
  Requires-Dist: alembic ~=1.9 ; extra == 'complete-api'
129
- Requires-Dist: apscheduler <4,>=3.10.3 ; extra == 'complete-api'
130
+ Requires-Dist: apscheduler !=3.10.2,~=3.6 ; extra == 'complete-api'
130
131
  Requires-Dist: avro ~=1.11 ; extra == 'complete-api'
131
132
  Requires-Dist: azure-core ~=1.24 ; extra == 'complete-api'
132
133
  Requires-Dist: azure-identity ~=1.5 ; extra == 'complete-api'
@@ -136,22 +137,23 @@ Requires-Dist: dask-kubernetes ~=0.11.0 ; extra == 'complete-api'
136
137
  Requires-Dist: dask ~=2023.9.0 ; extra == 'complete-api'
137
138
  Requires-Dist: databricks-sdk ~=0.13.0 ; extra == 'complete-api'
138
139
  Requires-Dist: distributed ~=2023.9.0 ; extra == 'complete-api'
139
- Requires-Dist: fastapi ~=0.110.0 ; extra == 'complete-api'
140
+ Requires-Dist: fastapi ~=0.103.2 ; extra == 'complete-api'
140
141
  Requires-Dist: gcsfs ==2023.9.2 ; extra == 'complete-api'
141
142
  Requires-Dist: google-cloud-bigquery[bqstorage,pandas] ==3.14.1 ; extra == 'complete-api'
142
143
  Requires-Dist: graphviz ~=0.20.0 ; extra == 'complete-api'
143
- Requires-Dist: humanfriendly ~=10.0 ; extra == 'complete-api'
144
- Requires-Dist: igz-mgmt ~=0.1.0 ; extra == 'complete-api'
144
+ Requires-Dist: humanfriendly ~=9.2 ; extra == 'complete-api'
145
+ Requires-Dist: igz-mgmt ~=0.0.10 ; extra == 'complete-api'
145
146
  Requires-Dist: kafka-python ~=2.0 ; extra == 'complete-api'
146
147
  Requires-Dist: mlflow ~=2.8 ; extra == 'complete-api'
147
148
  Requires-Dist: msrest ~=0.6.21 ; extra == 'complete-api'
148
- Requires-Dist: objgraph ~=3.6 ; extra == 'complete-api'
149
+ Requires-Dist: objgraph ~=3.5 ; extra == 'complete-api'
149
150
  Requires-Dist: plotly <5.12.0,~=5.4 ; extra == 'complete-api'
150
151
  Requires-Dist: pymysql ~=1.0 ; extra == 'complete-api'
151
152
  Requires-Dist: pyopenssl >=23 ; extra == 'complete-api'
152
153
  Requires-Dist: redis ~=4.3 ; extra == 'complete-api'
153
154
  Requires-Dist: s3fs ==2023.9.2 ; extra == 'complete-api'
154
155
  Requires-Dist: sqlalchemy ~=1.4 ; extra == 'complete-api'
156
+ Requires-Dist: sqlite3-to-mysql ~=1.4 ; extra == 'complete-api'
155
157
  Requires-Dist: timelength ~=1.1 ; extra == 'complete-api'
156
158
  Requires-Dist: uvicorn ~=0.27.1 ; extra == 'complete-api'
157
159
  Provides-Extra: dask