etlplus 0.16.0__py3-none-any.whl → 0.16.6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- etlplus/README.md +22 -0
- etlplus/__init__.py +2 -0
- etlplus/api/__init__.py +14 -14
- etlplus/api/auth.py +9 -6
- etlplus/api/config.py +6 -6
- etlplus/api/endpoint_client.py +16 -16
- etlplus/api/errors.py +4 -4
- etlplus/api/pagination/__init__.py +6 -6
- etlplus/api/pagination/config.py +11 -9
- etlplus/api/rate_limiting/__init__.py +2 -2
- etlplus/api/rate_limiting/config.py +10 -10
- etlplus/api/rate_limiting/rate_limiter.py +2 -2
- etlplus/api/request_manager.py +4 -4
- etlplus/api/retry_manager.py +6 -6
- etlplus/api/transport.py +10 -10
- etlplus/api/types.py +47 -26
- etlplus/api/utils.py +49 -49
- etlplus/cli/commands.py +22 -22
- etlplus/cli/handlers.py +12 -13
- etlplus/{workflow/pipeline.py → config.py} +54 -91
- etlplus/connector/__init__.py +6 -6
- etlplus/connector/api.py +7 -7
- etlplus/connector/database.py +3 -3
- etlplus/connector/file.py +3 -3
- etlplus/connector/types.py +2 -2
- etlplus/enums.py +35 -167
- etlplus/ops/__init__.py +1 -0
- etlplus/ops/enums.py +173 -0
- etlplus/ops/extract.py +209 -22
- etlplus/ops/load.py +140 -34
- etlplus/ops/run.py +88 -103
- etlplus/ops/transform.py +46 -27
- etlplus/ops/types.py +147 -0
- etlplus/ops/utils.py +5 -5
- etlplus/ops/validate.py +13 -13
- etlplus/types.py +5 -102
- etlplus/workflow/README.md +0 -24
- etlplus/workflow/__init__.py +2 -4
- etlplus/workflow/dag.py +23 -1
- etlplus/workflow/jobs.py +15 -28
- etlplus/workflow/profile.py +4 -2
- {etlplus-0.16.0.dist-info → etlplus-0.16.6.dist-info}/METADATA +1 -1
- {etlplus-0.16.0.dist-info → etlplus-0.16.6.dist-info}/RECORD +47 -45
- {etlplus-0.16.0.dist-info → etlplus-0.16.6.dist-info}/WHEEL +0 -0
- {etlplus-0.16.0.dist-info → etlplus-0.16.6.dist-info}/entry_points.txt +0 -0
- {etlplus-0.16.0.dist-info → etlplus-0.16.6.dist-info}/licenses/LICENSE +0 -0
- {etlplus-0.16.0.dist-info → etlplus-0.16.6.dist-info}/top_level.txt +0 -0
etlplus/cli/handlers.py
CHANGED
@@ -14,6 +14,7 @@ from typing import Any
 from typing import Literal
 from typing import cast

+from .. import Config
 from ..database import load_table_spec
 from ..database import render_tables
 from ..file import File
@@ -23,11 +24,9 @@ from ..ops import load
 from ..ops import run
 from ..ops import transform
 from ..ops import validate
-from ..ops.validate import
+from ..ops.validate import FieldRulesDict
 from ..types import JSONData
 from ..types import TemplateKey
-from ..workflow import PipelineConfig
-from ..workflow import load_pipeline_config
 from . import io as cli_io

 # SECTION: EXPORTS ========================================================== #
@@ -73,14 +72,14 @@ def _collect_table_specs(
         specs.append(dict(load_table_spec(Path(spec_path))))

     if config_path:
-        cfg =
+        cfg = Config.from_yaml(config_path, substitute=True)
         specs.extend(getattr(cfg, 'table_schemas', []))

     return specs


 def _check_sections(
-    cfg:
+    cfg: Config,
     *,
     jobs: bool,
     pipelines: bool,
@@ -93,7 +92,7 @@ def _check_sections(

     Parameters
     ----------
-    cfg :
+    cfg : Config
         The loaded pipeline configuration.
     jobs : bool
         Whether to include job metadata.
@@ -133,14 +132,14 @@ def _check_sections(


 def _pipeline_summary(
-    cfg:
+    cfg: Config,
 ) -> dict[str, Any]:
     """
     Return a human-friendly snapshot of a pipeline config.

     Parameters
     ----------
-    cfg :
+    cfg : Config
         The loaded pipeline configuration.

     Returns
@@ -229,7 +228,7 @@ def check_handler(
         Zero on success.

     """
-    cfg =
+    cfg = Config.from_yaml(config, substitute=substitute)
     if summary:
         cli_io.emit_json(_pipeline_summary(cfg), pretty=True)
         return 0
@@ -514,7 +513,7 @@ def run_handler(
     int
         Zero on success.
     """
-    cfg =
+    cfg = Config.from_yaml(config, substitute=True)

     job_name = job or pipeline
     if job_name:
@@ -662,7 +661,7 @@ def validate_handler(
     if not isinstance(rules_payload, dict):
         raise ValueError('rules must resolve to a mapping of field rules')

-    field_rules = cast(Mapping[str,
+    field_rules = cast(Mapping[str, FieldRulesDict], rules_payload)
     result = validate(payload, field_rules)

     if target and target != '-':
@@ -671,11 +670,11 @@ def validate_handler(
             cli_io.write_json_output(
                 validated_data,
                 target,
-                success_message='
+                success_message='ValidationDict result saved to',
             )
         else:
             print(
-                f'
+                f'ValidationDict failed, no data to save for {target}',
                 file=sys.stderr,
             )
     else:
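The recurring change above swaps the removed etlplus.workflow helpers for the relocated Config class: every handler now builds its configuration with Config.from_yaml(...). A minimal migration sketch based on the imports and calls visible in this diff, assuming a YAML file at a hypothetical pipeline.yml path:

# 0.16.0: module-level helper exported from etlplus.workflow
from etlplus.workflow import load_pipeline_config

cfg = load_pipeline_config('pipeline.yml', substitute=True)

# 0.16.6: classmethod on the Config class, imported from the package root
from etlplus import Config

cfg = Config.from_yaml('pipeline.yml', substitute=True)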
etlplus/{workflow/pipeline.py → config.py}
CHANGED

@@ -1,7 +1,7 @@
 """
-:mod:`etlplus.
+:mod:`etlplus.config` module.

-
+Configuration model and helpers for job pipeline orchestration.

 Notes
 -----
@@ -24,46 +24,66 @@ from pathlib import Path
 from typing import Any
 from typing import Self

-from
-from
-from
-from
-from
-from
-from
-from
-from
-from .jobs import JobConfig
-from .profile import ProfileConfig
+from .api import ApiConfig
+from .connector import Connector
+from .connector import parse_connector
+from .file import File
+from .file import FileFormat
+from .types import StrAnyMap
+from .utils import coerce_dict
+from .utils import deep_substitute
+from .utils import maybe_mapping
+from .workflow.jobs import JobConfig
+from .workflow.profile import ProfileConfig

 # SECTION: EXPORTS ========================================================== #


 __all__ = [
     # Data Classes
-    '
-    # Functions
-    'load_pipeline_config',
+    'Config',
 ]


 # SECTION: INTERNAL FUNCTIONS =============================================== #


-def
+def _build_connectors(
     raw: StrAnyMap,
+    *,
     key: str,
-
-) -> list[T]:
+) -> list[Connector]:
     """
-
+    Parse connector entries from a list under ``raw[key]``.

     Parameters
     ----------
     raw : StrAnyMap
         Raw pipeline mapping.
     key : str
-        Key pointing to
+        Key pointing to connector entries (e.g., ``"sources"``).
+
+    Returns
+    -------
+    list[Connector]
+        Parsed connector instances.
+    """
+    return list(
+        _collect_parsed(raw.get(key, []) or [], _parse_connector_entry),
+    )
+
+
+def _collect_parsed[T](
+    items: Any,
+    parser: Callable[[Any], T | None],
+) -> list[T]:
+    """
+    Collect parsed items from ``raw[key]`` using a tolerant parser.
+
+    Parameters
+    ----------
+    items : Any
+        List-like payload to parse.
     parser : Callable[[Any], T | None]
         Parser that returns an instance or ``None`` for invalid entries.

@@ -72,12 +92,12 @@ def _collect_parsed[T](
     list[T]
         Parsed items, excluding invalid entries.
     """
-
-    for entry in
+    parsed_items: list[T] = []
+    for entry in items or []:
         parsed = parser(entry)
         if parsed is not None:
-
-    return
+            parsed_items.append(parsed)
+    return parsed_items


 def _parse_connector_entry(
@@ -104,71 +124,11 @@ def _parse_connector_entry(
     return None


-def _build_sources(
-    raw: StrAnyMap,
-) -> list[Connector]:
-    """
-    Return a list of source connectors parsed from the mapping.
-
-    Parameters
-    ----------
-    raw : StrAnyMap
-        Raw pipeline mapping.
-
-    Returns
-    -------
-    list[Connector]
-        Parsed source connectors.
-    """
-    return list(
-        _collect_parsed(raw, 'sources', _parse_connector_entry),
-    )
-
-
-def _build_targets(
-    raw: StrAnyMap,
-) -> list[Connector]:
-    """
-    Return a list of target connectors parsed from the mapping.
-
-    Parameters
-    ----------
-    raw : StrAnyMap
-        Raw pipeline mapping.
-
-    Returns
-    -------
-    list[Connector]
-        Parsed target connectors.
-    """
-    return list(
-        _collect_parsed(raw, 'targets', _parse_connector_entry),
-    )
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def load_pipeline_config(
-    path: Path | str,
-    *,
-    substitute: bool = False,
-    env: Mapping[str, str] | None = None,
-) -> PipelineConfig:
-    """
-    Load a pipeline YAML file into a ``PipelineConfig`` instance.
-
-    Delegates to ``PipelineConfig.from_yaml`` for construction and optional
-    variable substitution.
-    """
-    return PipelineConfig.from_yaml(path, substitute=substitute, env=env)
-
-
 # SECTION: DATA CLASSES ===================================================== #


 @dataclass(kw_only=True, slots=True)
-class
+class Config:
     """
     Configuration for the data processing pipeline.

@@ -231,7 +191,7 @@ class PipelineConfig:
         env: Mapping[str, str] | None = None,
     ) -> Self:
         """
-        Parse a YAML file into a ``
+        Parse a YAML file into a ``Config`` instance.

         Parameters
         ----------
@@ -279,7 +239,7 @@ class PipelineConfig:
         raw: StrAnyMap,
     ) -> Self:
         """
-        Parse a mapping into a ``
+        Parse a mapping into a ``Config`` instance.

         Parameters
         ----------
@@ -311,17 +271,20 @@ class PipelineConfig:
         file_systems = coerce_dict(raw.get('file_systems'))

         # Sources
-        sources =
+        sources = _build_connectors(raw, key='sources')

         # Validations/Transforms
         validations = coerce_dict(raw.get('validations'))
         transforms = coerce_dict(raw.get('transforms'))

         # Targets
-        targets =
+        targets = _build_connectors(raw, key='targets')

         # Jobs
-        jobs = _collect_parsed(
+        jobs: list[JobConfig] = _collect_parsed(
+            raw.get('jobs', []) or [],
+            JobConfig.from_obj,
+        )

         # Table schemas (optional, tolerant pass-through structures).
         table_schemas: list[dict[str, Any]] = []
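The rename also consolidates the old _build_sources/_build_targets pair into a single _build_connectors(raw, key=...) helper backed by the generic _collect_parsed[T], which silently skips entries the parser rejects instead of raising. A self-contained sketch of that tolerant-collection pattern (Python 3.12+ generic syntax; the parse_int parser is illustrative, not part of etlplus):

from collections.abc import Callable
from typing import Any


def collect_parsed[T](items: Any, parser: Callable[[Any], T | None]) -> list[T]:
    # Keep only entries the parser accepts; None signals "skip this entry".
    parsed_items: list[T] = []
    for entry in items or []:
        parsed = parser(entry)
        if parsed is not None:
            parsed_items.append(parsed)
    return parsed_items


def parse_int(entry: Any) -> int | None:
    # Tolerant parser: invalid entries yield None instead of raising.
    try:
        return int(entry)
    except (TypeError, ValueError):
        return None


print(collect_parsed(['1', 'x', None, 3], parse_int))  # -> [1, 3]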
etlplus/connector/__init__.py
CHANGED
@@ -7,15 +7,15 @@ Connector configuration types and enums.
 from __future__ import annotations

 from .api import ConnectorApi
-from .api import
+from .api import ConnectorApiConfigDict
 from .connector import Connector
 from .core import ConnectorBase
 from .core import ConnectorProtocol
 from .database import ConnectorDb
-from .database import
+from .database import ConnectorDbConfigDict
 from .enums import DataConnectorType
 from .file import ConnectorFile
-from .file import
+from .file import ConnectorFileConfigDict
 from .types import ConnectorType
 from .utils import parse_connector

@@ -37,7 +37,7 @@ __all__ = [
     'ConnectorProtocol',
     'ConnectorType',
     # Typed Dicts
-    '
-    '
-    '
+    'ConnectorApiConfigDict',
+    'ConnectorDbConfigDict',
+    'ConnectorFileConfigDict',
 ]
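An unchanged hunk header in api.py below still reads class ConnectorApiConfigMap(TypedDict, total=False):, which suggests these exports were renamed from *ConfigMap to *ConfigDict in this release. Downstream imports would need a one-line update; a sketch, assuming the 0.16.0 names followed that same *ConfigMap pattern:

# 0.16.0 (assumed): from etlplus.connector import ConnectorApiConfigMap
# 0.16.6:
from etlplus.connector import ConnectorApiConfigDict
from etlplus.connector import ConnectorDbConfigDict
from etlplus.connector import ConnectorFileConfigDict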
etlplus/connector/api.py
CHANGED
@@ -22,9 +22,9 @@ from typing import TypedDict
 from typing import overload

 from ..api import PaginationConfig
-from ..api import
+from ..api import PaginationConfigDict
 from ..api import RateLimitConfig
-from ..api import
+from ..api import RateLimitConfigDict
 from ..types import StrAnyMap
 from ..types import StrStrMap
 from ..utils import cast_str_dict
@@ -39,14 +39,14 @@ from .types import ConnectorType

 __all__ = [
     'ConnectorApi',
-    '
+    'ConnectorApiConfigDict',
 ]


 # SECTION: TYPED DICTS ====================================================== #


-class
+class ConnectorApiConfigDict(TypedDict, total=False):
     """
     Shape accepted by :meth:`ConnectorApi.from_obj` (all keys optional).

@@ -61,8 +61,8 @@ class ConnectorApiConfigMap(TypedDict, total=False):
     method: str
     headers: StrStrMap
     query_params: StrAnyMap
-    pagination:
-    rate_limit:
+    pagination: PaginationConfigDict
+    rate_limit: RateLimitConfigDict
     api: str
     endpoint: str

@@ -121,7 +121,7 @@ class ConnectorApi(ConnectorBase):

     @classmethod
     @overload
-    def from_obj(cls, obj:
+    def from_obj(cls, obj: ConnectorApiConfigDict) -> Self: ...

     @classmethod
     @overload
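Because the TypedDict is declared with total=False, every key is optional and a partial mapping type-checks against the from_obj overload shown above. A hedged usage sketch (the values are illustrative; only keys attested in the TypedDict above are used):

from etlplus.connector import ConnectorApi
from etlplus.connector import ConnectorApiConfigDict

cfg: ConnectorApiConfigDict = {
    'method': 'GET',
    'headers': {'Accept': 'application/json'},
    'endpoint': 'users',
}
connector = ConnectorApi.from_obj(cfg)  # resolves the ConnectorApiConfigDict overload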
etlplus/connector/database.py
CHANGED
@@ -29,14 +29,14 @@ from .types import ConnectorType

 __all__ = [
     'ConnectorDb',
-    '
+    'ConnectorDbConfigDict',
 ]


 # SECTION: TYPED DICTS ====================================================== #


-class
+class ConnectorDbConfigDict(TypedDict, total=False):
     """
     Shape accepted by :meth:`ConnectorDb.from_obj` (all keys optional).

@@ -87,7 +87,7 @@ class ConnectorDb(ConnectorBase):

     @classmethod
     @overload
-    def from_obj(cls, obj:
+    def from_obj(cls, obj: ConnectorDbConfigDict) -> Self: ...

     @classmethod
     @overload
etlplus/connector/file.py
CHANGED
@@ -32,14 +32,14 @@ from .types import ConnectorType

 __all__ = [
     'ConnectorFile',
-    '
+    'ConnectorFileConfigDict',
 ]


 # SECTION: TYPED DICTS ====================================================== #


-class
+class ConnectorFileConfigDict(TypedDict, total=False):
     """
     Shape accepted by :meth:`ConnectorFile.from_obj` (all keys optional).

@@ -86,7 +86,7 @@ class ConnectorFile(ConnectorBase):

     @classmethod
     @overload
-    def from_obj(cls, obj:
+    def from_obj(cls, obj: ConnectorFileConfigDict) -> Self: ...

     @classmethod
     @overload
etlplus/connector/types.py
CHANGED
@@ -14,8 +14,8 @@ Examples
 >>> "type": "database",
 >>> "connection_string": "postgresql://user:pass@localhost/db",
 >>> }
->>> from etlplus.api import
->>> rp:
+>>> from etlplus.api import RetryPolicyDict
+>>> rp: RetryPolicyDict = {"max_attempts": 3, "backoff": 0.5}
 """

 from __future__ import annotations