metaflow 2.15.6__py2.py3-none-any.whl → 2.15.8__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (28)
  1. metaflow/cli.py +8 -0
  2. metaflow/cli_components/run_cmds.py +2 -2
  3. metaflow/cmd/main_cli.py +1 -1
  4. metaflow/metadata_provider/metadata.py +35 -0
  5. metaflow/metaflow_config.py +6 -0
  6. metaflow/metaflow_environment.py +6 -1
  7. metaflow/metaflow_git.py +115 -0
  8. metaflow/metaflow_version.py +2 -2
  9. metaflow/plugins/__init__.py +1 -0
  10. metaflow/plugins/argo/argo_workflows.py +66 -17
  11. metaflow/plugins/argo/argo_workflows_cli.py +11 -0
  12. metaflow/plugins/argo/argo_workflows_deployer_objects.py +7 -6
  13. metaflow/plugins/aws/aws_client.py +4 -3
  14. metaflow/plugins/datatools/s3/s3.py +46 -44
  15. metaflow/plugins/datatools/s3/s3op.py +133 -63
  16. metaflow/plugins/uv/__init__.py +0 -0
  17. metaflow/plugins/uv/bootstrap.py +100 -0
  18. metaflow/plugins/uv/uv_environment.py +70 -0
  19. metaflow/version.py +1 -1
  20. {metaflow-2.15.6.data → metaflow-2.15.8.data}/data/share/metaflow/devtools/Makefile +4 -2
  21. {metaflow-2.15.6.dist-info → metaflow-2.15.8.dist-info}/METADATA +4 -3
  22. {metaflow-2.15.6.dist-info → metaflow-2.15.8.dist-info}/RECORD +28 -24
  23. {metaflow-2.15.6.dist-info → metaflow-2.15.8.dist-info}/WHEEL +1 -1
  24. {metaflow-2.15.6.data → metaflow-2.15.8.data}/data/share/metaflow/devtools/Tiltfile +0 -0
  25. {metaflow-2.15.6.data → metaflow-2.15.8.data}/data/share/metaflow/devtools/pick_services.sh +0 -0
  26. {metaflow-2.15.6.dist-info → metaflow-2.15.8.dist-info}/entry_points.txt +0 -0
  27. {metaflow-2.15.6.dist-info → metaflow-2.15.8.dist-info/licenses}/LICENSE +0 -0
  28. {metaflow-2.15.6.dist-info → metaflow-2.15.8.dist-info}/top_level.txt +0 -0
metaflow/cli.py CHANGED
@@ -17,6 +17,7 @@ from .flowspec import _FlowState
 from .graph import FlowGraph
 from .metaflow_config import (
     DEFAULT_DATASTORE,
+    DEFAULT_DECOSPECS,
     DEFAULT_ENVIRONMENT,
     DEFAULT_EVENT_LOGGER,
     DEFAULT_METADATA,
@@ -509,9 +510,16 @@ def start(
 ):
     # run/resume are special cases because they can add more decorators with --with,
     # so they have to take care of themselves.
+
     all_decospecs = ctx.obj.tl_decospecs + list(
         ctx.obj.environment.decospecs() or []
     )
+
+    # We add the default decospecs for everything except init and step since in those
+    # cases, the decospecs will already have been handled by either a run/resume
+    # or a scheduler setting them up in their own way.
+    if ctx.saved_args[0] not in ("step", "init"):
+        all_decospecs += DEFAULT_DECOSPECS.split()
     if all_decospecs:
         decorators._attach_decorators(ctx.obj.flow, all_decospecs)
     decorators._init(ctx.obj.flow)
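
A minimal sketch of the new code path, assuming DEFAULT_DECOSPECS holds a whitespace-separated list of decorator specs (per Metaflow's from_conf convention it is typically sourced from a METAFLOW_DEFAULT_DECOSPECS environment variable or the Metaflow config file; all values below are illustrative):

    # Illustrative only: decospec strings use the same syntax as --with.
    DEFAULT_DECOSPECS = "retry card"      # hypothetical configured value
    command = "run"                       # stands in for ctx.saved_args[0]
    all_decospecs = ["environment"]       # from tl_decospecs + environment
    if command not in ("step", "init"):
        all_decospecs += DEFAULT_DECOSPECS.split()
    # all_decospecs == ["environment", "retry", "card"],
    # later attached to every step of the flow.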
metaflow/cli_components/run_cmds.py CHANGED
@@ -71,7 +71,7 @@ def write_file(file_path, content):
         f.write(str(content))


-def config_merge_cb(ctx, param, value):
+def config_callback(ctx, param, value):
     # Callback to:
     # - read the Click auto_envvar variable from both the
     #   environment AND the configuration
@@ -127,7 +127,7 @@ def common_run_options(func):
         help="Add a decorator to all steps. You can specify this "
         "option multiple times to attach multiple decorators "
         "in steps.",
-        callback=config_merge_cb,
+        callback=config_callback,
     )
     @click.option(
         "--run-id-file",
metaflow/cmd/main_cli.py CHANGED
@@ -94,7 +94,7 @@ def start(ctx):
     echo("(%s)\n" % version, fg="magenta", bold=False)

     if ctx.invoked_subcommand is None:
-        echo("More data science, less engineering\n", fg="magenta")
+        echo("More AI, less engineering\n", fg="magenta")

         lnk_sz = max(len(lnk) for lnk in CONTACT_INFO.values()) + 1
         for what, lnk in CONTACT_INFO.items():
metaflow/metadata_provider/metadata.py CHANGED
@@ -630,6 +630,20 @@ class MetadataProvider(object):
             sys_info["r_version"] = env["r_version_code"]
         return sys_info

+    def _get_git_info_as_dict(self):
+        git_info = {}
+        env = self._environment.get_environment_info()
+        for key in [
+            "repo_url",
+            "branch_name",
+            "commit_sha",
+            "has_uncommitted_changes",
+        ]:
+            if key in env and env[key]:
+                git_info[key] = env[key]
+
+        return git_info
+
     def _get_system_tags(self):
         """Convert system info dictionary into a list of system tags"""
         return [
@@ -670,6 +684,27 @@ class MetadataProvider(object):
                     tags=["attempt_id:{0}".format(attempt)],
                 )
             )
+        # Add script name as metadata
+        script_name = self._environment.get_environment_info()["script"]
+        metadata.append(
+            MetaDatum(
+                field="script-name",
+                value=script_name,
+                type="script-name",
+                tags=["attempt_id:{0}".format(attempt)],
+            )
+        )
+        # And add git metadata
+        git_info = self._get_git_info_as_dict()
+        if git_info:
+            metadata.append(
+                MetaDatum(
+                    field="git-info",
+                    value=json.dumps(git_info),
+                    type="git-info",
+                    tags=["attempt_id:{0}".format(attempt)],
+                )
+            )
         if metadata:
             self.register_metadata(run_id, step_name, task_id, metadata)

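A note on the shape of the new git-info record: its value is json.dumps of the dict assembled by _get_git_info_as_dict, and since that helper keeps only truthy values, a clean checkout (has_uncommitted_changes == False) records no such key. A sketch with made-up repository state:

    import json

    git_info = {
        "repo_url": "https://github.com/Netflix/metaflow",  # illustrative
        "branch_name": "master",                            # illustrative
        "commit_sha": "abc1234",                            # placeholder SHA
        "has_uncommitted_changes": True,
    }
    # Stored as MetaDatum(field="git-info", value=json.dumps(git_info), ...)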
metaflow/metaflow_config.py CHANGED
@@ -109,6 +109,12 @@ S3_WORKER_COUNT = from_conf("S3_WORKER_COUNT", 64)
 # top-level retries)
 S3_TRANSIENT_RETRY_COUNT = from_conf("S3_TRANSIENT_RETRY_COUNT", 20)

+# S3 retry configuration used in the aws client
+# Use the adaptive retry strategy by default
+S3_CLIENT_RETRY_CONFIG = from_conf(
+    "S3_CLIENT_RETRY_CONFIG", {"max_attempts": 10, "mode": "adaptive"}
+)
+
 # Threshold to start printing warnings for an AWS retry
 RETRY_WARNING_THRESHOLD = 3
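Like other knobs defined via from_conf, this one should be overridable through a METAFLOW_-prefixed environment variable (Metaflow's usual convention), assuming from_conf parses JSON for structured defaults. A hypothetical override, which must keep botocore's "retries" dict shape:

    import os

    # Hypothetical override; set before metaflow_config is imported.
    os.environ["METAFLOW_S3_CLIENT_RETRY_CONFIG"] = (
        '{"max_attempts": 5, "mode": "standard"}'
    )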
metaflow/metaflow_environment.py CHANGED
@@ -4,6 +4,7 @@ import sys

 from .util import get_username
 from . import metaflow_version
+from . import metaflow_git
 from metaflow.exception import MetaflowException
 from metaflow.extension_support import dump_module_info
 from metaflow.mflog import BASH_MFLOG, BASH_FLUSH_LOGS
@@ -197,6 +198,10 @@ class MetaflowEnvironment(object):
             "python_version_code": "%d.%d.%d" % sys.version_info[:3],
             "metaflow_version": metaflow_version.get_version(),
             "script": os.path.basename(os.path.abspath(sys.argv[0])),
+            # Add git info
+            **metaflow_git.get_repository_info(
+                path=os.path.dirname(os.path.abspath(sys.argv[0]))
+            ),
         }
         if R.use_r():
             env["metaflow_r_version"] = R.metaflow_r_version()
@@ -206,7 +211,7 @@ class MetaflowEnvironment(object):
         # Information about extension modules (to load them in the proper order)
         ext_key, ext_val = dump_module_info()
         env[ext_key] = ext_val
-        return env
+        return {k: v for k, v in env.items() if v is not None and v != ""}

     def executable(self, step_name, default=None):
         if default is not None:
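
The new return statement matters for the git keys in particular: get_repository_info reports None for fields it cannot determine, and the comprehension drops those (along with empty strings) from the environment info. A tiny illustration with made-up values:

    env = {"script": "flow.py", "repo_url": None, "branch_name": ""}
    cleaned = {k: v for k, v in env.items() if v is not None and v != ""}
    # cleaned == {"script": "flow.py"}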
metaflow/metaflow_git.py ADDED
@@ -0,0 +1,115 @@
+#!/usr/bin/env python
+"""Get git repository information for the package
+
+Functions to retrieve git repository details like URL, branch name,
+and commit SHA for Metaflow code provenance tracking.
+"""
+
+import os
+import subprocess
+from typing import Dict, List, Optional, Tuple, Union
+
+# Cache for git information to avoid repeated subprocess calls
+_git_info_cache = None
+
+__all__ = ("get_repository_info",)
+
+
+def _call_git(
+    args: List[str], path=Union[str, os.PathLike]
+) -> Tuple[Optional[str], Optional[int], bool]:
+    """
+    Call git with provided args.
+
+    Returns
+    -------
+    tuple : Tuple containing
+        (stdout, exitcode, failure) of the call
+    """
+    try:
+        result = subprocess.run(
+            ["git", *args],
+            cwd=path,
+            capture_output=True,
+            text=True,
+            check=False,
+        )
+        return result.stdout.strip(), result.returncode, False
+    except (OSError, subprocess.SubprocessError):
+        # Covers subprocess timeouts and other errors which would not lead to an exit code
+        return None, None, True
+
+
+def _get_repo_url(path: Union[str, os.PathLike]) -> Optional[str]:
+    """Get the repository URL from git config"""
+    stdout, returncode, _failed = _call_git(
+        ["config", "--get", "remote.origin.url"], path
+    )
+    if returncode == 0:
+        url = stdout
+        # Convert SSH URLs to HTTPS for clickable links
+        if url.startswith("git@"):
+            parts = url.split(":", 1)
+            if len(parts) == 2:
+                domain = parts[0].replace("git@", "")
+                repo_path = parts[1]
+                url = f"https://{domain}/{repo_path}"
+        return url
+    return None
+
+
+def _get_branch_name(path: Union[str, os.PathLike]) -> Optional[str]:
+    """Get the current git branch name"""
+    stdout, returncode, _failed = _call_git(["rev-parse", "--abbrev-ref", "HEAD"], path)
+    return stdout if returncode == 0 else None
+
+
+def _get_commit_sha(path: Union[str, os.PathLike]) -> Optional[str]:
+    """Get the current git commit SHA"""
+    stdout, returncode, _failed = _call_git(["rev-parse", "HEAD"], path)
+    return stdout if returncode == 0 else None
+
+
+def _is_in_git_repo(path: Union[str, os.PathLike]) -> bool:
+    """Check if we're currently in a git repository"""
+    stdout, returncode, _failed = _call_git(
+        ["rev-parse", "--is-inside-work-tree"], path
+    )
+    return returncode == 0 and stdout == "true"
+
+
+def _has_uncommitted_changes(path: Union[str, os.PathLike]) -> Optional[bool]:
+    """Check if the git repository has uncommitted changes"""
+    _stdout, returncode, failed = _call_git(
+        ["diff-index", "--quiet", "HEAD", "--"], path
+    )
+    if failed:
+        return None
+    return returncode != 0
+
+
+def get_repository_info(path: Union[str, os.PathLike]) -> Dict[str, Union[str, bool]]:
+    """Get git repository information for a path
+
+    Returns:
+        dict: Dictionary containing:
+            repo_url: Repository URL (converted to HTTPS if from SSH)
+            branch_name: Current branch name
+            commit_sha: Current commit SHA
+            has_uncommitted_changes: Boolean indicating if there are uncommitted changes
+    """
+    global _git_info_cache
+
+    if _git_info_cache is not None:
+        return _git_info_cache
+
+    _git_info_cache = {}
+    if _is_in_git_repo(path):
+        _git_info_cache = {
+            "repo_url": _get_repo_url(path),
+            "branch_name": _get_branch_name(path),
+            "commit_sha": _get_commit_sha(path),
+            "has_uncommitted_changes": _has_uncommitted_changes(path),
+        }
+
+    return _git_info_cache
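
A minimal usage sketch of the new module (output values are illustrative; note that the module-level cache means the first path queried wins for the life of the process):

    from metaflow.metaflow_git import get_repository_info

    info = get_repository_info(path=".")
    # Inside a checkout, something like:
    #   {"repo_url": "https://github.com/Netflix/metaflow",
    #    "branch_name": "master",
    #    "commit_sha": "<sha>",
    #    "has_uncommitted_changes": False}
    # Outside any git repository: {}
    # SSH remotes are rewritten for clickable links, e.g.
    #   git@github.com:Netflix/metaflow.git -> https://github.com/Netflix/metaflow.git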
metaflow/metaflow_version.py CHANGED
@@ -27,11 +27,11 @@ if name == "nt":
     """find the path to the git executable on Windows"""
     # first see if git is in the path
    try:
-        check_output(["where", "/Q", "git"])
+        subprocess.check_output(["where", "/Q", "git"])
        # if this command succeeded, git is in the path
        return "git"
    # catch the exception thrown if git was not found
-    except CalledProcessError:
+    except subprocess.CalledProcessError:
        pass
    # There are several locations where git.exe may be hiding
    possible_locations = []
metaflow/plugins/__init__.py CHANGED
@@ -75,6 +75,7 @@ FLOW_DECORATORS_DESC = [
 ENVIRONMENTS_DESC = [
     ("conda", ".pypi.conda_environment.CondaEnvironment"),
     ("pypi", ".pypi.pypi_environment.PyPIEnvironment"),
+    ("uv", ".uv.uv_environment.UVEnvironment"),
 ]

 # Add metadata providers here
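
Entries in ENVIRONMENTS_DESC pair a user-facing name with a class path relative to the plugins package, so this one line is what makes the new uv environment selectable alongside conda and pypi (presumably via --environment=uv). A hypothetical sketch of how such a (name, path) pair maps to a class; Metaflow's actual plugin loader may differ:

    import importlib

    def resolve_environment(desc, package="metaflow.plugins"):
        # Hypothetical helper, not from the diff.
        name, class_path = desc
        module_path, cls_name = class_path.rsplit(".", 1)
        module = importlib.import_module(module_path, package=package)
        return getattr(module, cls_name)

    # resolve_environment(("uv", ".uv.uv_environment.UVEnvironment"))
    # -> <class 'metaflow.plugins.uv.uv_environment.UVEnvironment'>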
metaflow/plugins/argo/argo_workflows.py CHANGED
@@ -7,6 +7,7 @@ import sys
 from collections import defaultdict
 from hashlib import sha1
 from math import inf
+from typing import List

 from metaflow import JSONType, current
 from metaflow.decorators import flow_decorators
@@ -110,6 +111,7 @@ class ArgoWorkflows(object):
         notify_pager_duty_integration_key=None,
         notify_incident_io_api_key=None,
         incident_io_alert_source_config_id=None,
+        incident_io_metadata: List[str] = None,
         enable_heartbeat_daemon=True,
         enable_error_msg_capture=False,
     ):
@@ -161,6 +163,9 @@ class ArgoWorkflows(object):
         self.notify_pager_duty_integration_key = notify_pager_duty_integration_key
         self.notify_incident_io_api_key = notify_incident_io_api_key
         self.incident_io_alert_source_config_id = incident_io_alert_source_config_id
+        self.incident_io_metadata = self.parse_incident_io_metadata(
+            incident_io_metadata
+        )
         self.enable_heartbeat_daemon = enable_heartbeat_daemon
         self.enable_error_msg_capture = enable_error_msg_capture
         self.parameters = self._process_parameters()
@@ -287,6 +292,21 @@ class ArgoWorkflows(object):

         return True

+    @staticmethod
+    def parse_incident_io_metadata(metadata: List[str] = None):
+        "parse key value pairs into a dict for incident.io metadata if given"
+        parsed_metadata = None
+        if metadata is not None:
+            parsed_metadata = {}
+            for kv in metadata:
+                key, value = kv.split("=", 1)
+                if key in parsed_metadata:
+                    raise MetaflowException(
+                        "Incident.io Metadata *%s* provided multiple times" % key
+                    )
+                parsed_metadata[key] = value
+        return parsed_metadata
+
     @classmethod
     def trigger(cls, name, parameters=None):
         if parameters is None:
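
This helper consumes the repeated --incident-io-metadata Key=Value CLI option added in argo_workflows_cli.py below. A quick sketch of its contract (inputs are illustrative):

    # Values may themselves contain "=" thanks to the maxsplit=1.
    ArgoWorkflows.parse_incident_io_metadata(["team=ml", "query=a=b"])
    # -> {"team": "ml", "query": "a=b"}
    ArgoWorkflows.parse_incident_io_metadata(None)  # -> None
    # A repeated key such as ["team=ml", "team=data"] raises MetaflowException.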
@@ -524,6 +544,7 @@ class ArgoWorkflows(object):
             default_value = json.dumps(default_value)

         parameters[param.name] = dict(
+            python_var_name=var,
             name=param.name,
             value=default_value,
             type=param_type,
@@ -838,7 +859,11 @@ class ArgoWorkflows(object):
                 Arguments().parameters(
                     [
                         Parameter(parameter["name"])
-                        .value(parameter["value"])
+                        .value(
+                            "'%s'" % parameter["value"]
+                            if parameter["type"] == "JSON"
+                            else parameter["value"]
+                        )
                         .description(parameter.get("description"))
                         # TODO: Better handle IncludeFile in Argo Workflows UI.
                         for parameter in self.parameters.values()
@@ -1594,11 +1619,7 @@ class ArgoWorkflows(object):
                         # {{foo.bar['param_name']}}.
                         # https://argoproj.github.io/argo-events/tutorials/02-parameterization/
                        # http://masterminds.github.io/sprig/strings.html
-                        (
-                            "--%s='{{workflow.parameters.%s}}'"
-                            if parameter["type"] == "JSON"
-                            else "--%s={{workflow.parameters.%s}}"
-                        )
+                        "--%s={{workflow.parameters.%s}}"
                        % (parameter["name"], parameter["name"])
                        for parameter in self.parameters.values()
                    ]
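
The two quoting changes are complementary: a JSON parameter's default is now single-quoted once, in the Argo parameter value itself, instead of at the point where the step command interpolates {{workflow.parameters.*}}. A sketch of the round trip with an illustrative value:

    import json

    default_value = json.dumps({"alpha": 0.5})  # what _process_parameters stores
    rendered = "'%s'" % default_value           # what the Arguments() block emits
    # rendered == "'{\"alpha\": 0.5}'" -- quoted exactly once, so the plain
    # "--%s={{workflow.parameters.%s}}" template no longer adds its own quotes.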
@@ -2551,9 +2572,12 @@ class ArgoWorkflows(object):
                     else None
                 ),
                 "metadata": {
-                    "run_status": "failed",
-                    "flow_name": self.flow.name,
-                    "run_id": "argo-{{workflow.name}}",
+                    **(self.incident_io_metadata or {}),
+                    **{
+                        "run_status": "failed",
+                        "flow_name": self.flow.name,
+                        "run_id": "argo-{{workflow.name}}",
+                    },
                 },
             }
         )
@@ -2602,9 +2626,12 @@ class ArgoWorkflows(object):
                     else None
                 ),
                 "metadata": {
-                    "run_status": "succeeded",
-                    "flow_name": self.flow.name,
-                    "run_id": "argo-{{workflow.name}}",
+                    **(self.incident_io_metadata or {}),
+                    **{
+                        "run_status": "succeeded",
+                        "flow_name": self.flow.name,
+                        "run_id": "argo-{{workflow.name}}",
+                    },
                 },
             }
         )
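
Because the user-supplied mapping is unpacked first, the reserved keys always win on collision; a minimal illustration (values made up):

    incident_io_metadata = {"team": "ml", "run_status": "overridden?"}
    merged = {
        **(incident_io_metadata or {}),
        **{"run_status": "failed", "flow_name": "MyFlow", "run_id": "argo-x"},
    }
    # merged["run_status"] == "failed"; user metadata cannot clobber the
    # run_status / flow_name / run_id fields.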
@@ -3156,15 +3183,37 @@ class ArgoWorkflows(object):
                             # NOTE: We need the conditional logic in order to successfully fall back to the default value
                             # when the event payload does not contain a key for a parameter.
                             # NOTE: Keys might contain dashes, so use the safer 'get' for fetching the value
-                            data_template='{{ if (hasKey $.Input.body.payload "%s") }}{{- (get $.Input.body.payload "%s" | toRawJson) -}}{{- else -}}{{ (fail "use-default-instead") }}{{- end -}}'
-                            % (v, v),
+                            data_template='{{ if (hasKey $.Input.body.payload "%s") }}{{- (get $.Input.body.payload "%s" %s) -}}{{- else -}}{{ (fail "use-default-instead") }}{{- end -}}'
+                            % (
+                                v,
+                                v,
+                                (
+                                    "| toRawJson | squote"
+                                    if self.parameters[parameter_name]["type"]
+                                    == "JSON"
+                                    else "| toRawJson"
+                                ),
+                            ),
                             # Unfortunately the sensor needs to
                             # record the default values for
                             # the parameters - there doesn't seem
                             # to be any way for us to skip
-                            value=self.parameters[parameter_name]["value"],
+                            value=(
+                                json.dumps(
+                                    self.parameters[parameter_name]["value"]
+                                )
+                                if self.parameters[parameter_name]["type"]
+                                == "JSON"
+                                else self.parameters[parameter_name]["value"]
+                            ),
                         )
                         .dest(
                             # this undocumented (mis?)feature in
metaflow/plugins/argo/argo_workflows_cli.py CHANGED
@@ -187,6 +187,13 @@ def argo_workflows(obj, name=None):
     default=None,
     help="Incident.io Alert source config ID. Example '01GW2G3V0S59R238FAHPDS1R66'",
 )
+@click.option(
+    "--incident-io-metadata",
+    default=None,
+    type=str,
+    multiple=True,
+    help="Incident.io Alert Custom Metadata field in the form of Key=Value",
+)
 @click.option(
     "--enable-heartbeat-daemon/--no-enable-heartbeat-daemon",
     default=False,
@@ -226,6 +233,7 @@ def create(
     notify_pager_duty_integration_key=None,
     notify_incident_io_api_key=None,
     incident_io_alert_source_config_id=None,
+    incident_io_metadata=None,
     enable_heartbeat_daemon=True,
     deployer_attribute_file=None,
     enable_error_msg_capture=False,
@@ -283,6 +291,7 @@ def create(
         notify_pager_duty_integration_key,
         notify_incident_io_api_key,
         incident_io_alert_source_config_id,
+        incident_io_metadata,
         enable_heartbeat_daemon,
         enable_error_msg_capture,
     )
@@ -459,6 +468,7 @@ def make_flow(
     notify_pager_duty_integration_key,
     notify_incident_io_api_key,
     incident_io_alert_source_config_id,
+    incident_io_metadata,
     enable_heartbeat_daemon,
     enable_error_msg_capture,
 ):
@@ -538,6 +548,7 @@ def make_flow(
         notify_pager_duty_integration_key=notify_pager_duty_integration_key,
         notify_incident_io_api_key=notify_incident_io_api_key,
         incident_io_alert_source_config_id=incident_io_alert_source_config_id,
+        incident_io_metadata=incident_io_metadata,
         enable_heartbeat_daemon=enable_heartbeat_daemon,
         enable_error_msg_capture=enable_error_msg_capture,
     )
metaflow/plugins/argo/argo_workflows_deployer_objects.py CHANGED
@@ -19,6 +19,7 @@ def generate_fake_flow_file_contents(
 ):
     params_code = ""
     for _, param_details in param_info.items():
+        param_python_var_name = param_details["python_var_name"]
         param_name = param_details["name"]
         param_type = param_details["type"]
         param_help = param_details["description"]
@@ -26,21 +27,21 @@ def generate_fake_flow_file_contents(

         if param_type == "JSON":
             params_code += (
-                f"    {param_name} = Parameter('{param_name}', "
-                f"type=JSONType, help='{param_help}', required={param_required})\n"
+                f"    {param_python_var_name} = Parameter('{param_name}', "
+                f"type=JSONType, help='''{param_help}''', required={param_required})\n"
             )
         elif param_type == "FilePath":
             is_text = param_details.get("is_text", True)
             encoding = param_details.get("encoding", "utf-8")
             params_code += (
-                f"    {param_name} = IncludeFile('{param_name}', "
-                f"is_text={is_text}, encoding='{encoding}', help='{param_help}', "
+                f"    {param_python_var_name} = IncludeFile('{param_name}', "
+                f"is_text={is_text}, encoding='{encoding}', help='''{param_help}''', "
                 f"required={param_required})\n"
             )
         else:
             params_code += (
-                f"    {param_name} = Parameter('{param_name}', "
-                f"type={param_type}, help='{param_help}', required={param_required})\n"
+                f"    {param_python_var_name} = Parameter('{param_name}', "
+                f"type={param_type}, help='''{param_help}''', required={param_required})\n"
             )

     project_decorator = f"@project(name='{project_name}')\n" if project_name else ""
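
The switch from help='{param_help}' to help='''{param_help}''' hardens the generated flow file against quotes inside a parameter description; a sketch of the failure the old form allowed (values illustrative):

    param_help = "user's input"  # an apostrophe in the description
    old = f"x = Parameter('x', help='{param_help}')"
    # old == "x = Parameter('x', help='user's input')" -> SyntaxError when executed
    new = f"x = Parameter('x', help='''{param_help}''')"
    # new parses fine; triple quotes tolerate embedded single quotes.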
metaflow/plugins/aws/aws_client.py CHANGED
@@ -14,6 +14,7 @@ class Boto3ClientProvider(object):
             AWS_SANDBOX_ENABLED,
             AWS_SANDBOX_STS_ENDPOINT_URL,
             AWS_SANDBOX_API_KEY,
+            S3_CLIENT_RETRY_CONFIG,
         )

         if session_vars is None:
@@ -37,10 +38,10 @@ class Boto3ClientProvider(object):
         if module == "s3" and (
             "config" not in client_params or client_params["config"].retries is None
         ):
-            # Use the adaptive retry strategy by default -- do not set anything if
-            # the user has already set something
+            # do not set anything if the user has already set something
             config = client_params.get("config", Config())
-            config.retries = {"max_attempts": 10, "mode": "adaptive"}
+            config.retries = S3_CLIENT_RETRY_CONFIG
+            client_params["config"] = config

         if AWS_SANDBOX_ENABLED:
             # role is ignored in the sandbox
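
Two things happen here: the retry policy moves from a hard-coded dict to the new S3_CLIENT_RETRY_CONFIG knob, and the added client_params["config"] = config line ensures a freshly created Config is actually handed to the client rather than discarded. For reference, the default is equivalent to this standalone botocore construction:

    from botocore.config import Config

    # Equivalent to the default S3_CLIENT_RETRY_CONFIG applied above.
    config = Config(retries={"max_attempts": 10, "mode": "adaptive"})
    # boto3 clients created with config=config use adaptive client-side retries.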