etlplus 0.16.0__py3-none-any.whl → 0.16.7__py3-none-any.whl

This diff shows the contents of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in their public registries.
Files changed (98)
  1. etlplus/README.md +24 -2
  2. etlplus/__init__.py +2 -0
  3. etlplus/api/__init__.py +14 -14
  4. etlplus/api/auth.py +9 -6
  5. etlplus/api/config.py +6 -6
  6. etlplus/api/endpoint_client.py +16 -16
  7. etlplus/api/enums.py +2 -2
  8. etlplus/api/errors.py +4 -4
  9. etlplus/api/pagination/__init__.py +6 -6
  10. etlplus/api/pagination/config.py +11 -9
  11. etlplus/api/rate_limiting/__init__.py +2 -2
  12. etlplus/api/rate_limiting/config.py +10 -10
  13. etlplus/api/rate_limiting/rate_limiter.py +2 -2
  14. etlplus/api/request_manager.py +4 -4
  15. etlplus/api/retry_manager.py +6 -6
  16. etlplus/api/transport.py +10 -10
  17. etlplus/api/types.py +47 -26
  18. etlplus/api/utils.py +49 -49
  19. etlplus/cli/README.md +9 -7
  20. etlplus/cli/commands.py +22 -22
  21. etlplus/cli/handlers.py +12 -13
  22. etlplus/cli/main.py +1 -1
  23. etlplus/{workflow/pipeline.py → config.py} +54 -91
  24. etlplus/connector/__init__.py +6 -6
  25. etlplus/connector/api.py +7 -7
  26. etlplus/connector/database.py +3 -3
  27. etlplus/connector/file.py +3 -3
  28. etlplus/connector/types.py +2 -2
  29. etlplus/database/README.md +7 -7
  30. etlplus/enums.py +35 -167
  31. etlplus/file/README.md +7 -5
  32. etlplus/file/accdb.py +2 -1
  33. etlplus/file/arrow.py +2 -1
  34. etlplus/file/bson.py +2 -1
  35. etlplus/file/cbor.py +2 -1
  36. etlplus/file/cfg.py +1 -1
  37. etlplus/file/conf.py +1 -1
  38. etlplus/file/dat.py +1 -1
  39. etlplus/file/dta.py +1 -1
  40. etlplus/file/duckdb.py +2 -1
  41. etlplus/file/enums.py +1 -1
  42. etlplus/file/fwf.py +2 -1
  43. etlplus/file/hbs.py +2 -1
  44. etlplus/file/hdf5.py +2 -1
  45. etlplus/file/ini.py +2 -1
  46. etlplus/file/ion.py +1 -1
  47. etlplus/file/jinja2.py +2 -1
  48. etlplus/file/log.py +1 -1
  49. etlplus/file/mat.py +1 -1
  50. etlplus/file/mdb.py +2 -1
  51. etlplus/file/msgpack.py +2 -1
  52. etlplus/file/mustache.py +2 -1
  53. etlplus/file/nc.py +1 -1
  54. etlplus/file/numbers.py +2 -1
  55. etlplus/file/ods.py +2 -1
  56. etlplus/file/pb.py +2 -1
  57. etlplus/file/pbf.py +2 -1
  58. etlplus/file/properties.py +2 -1
  59. etlplus/file/proto.py +2 -1
  60. etlplus/file/psv.py +2 -1
  61. etlplus/file/rda.py +2 -1
  62. etlplus/file/rds.py +1 -1
  63. etlplus/file/sas7bdat.py +2 -1
  64. etlplus/file/sav.py +1 -1
  65. etlplus/file/sqlite.py +2 -1
  66. etlplus/file/sylk.py +2 -1
  67. etlplus/file/tab.py +2 -1
  68. etlplus/file/toml.py +2 -1
  69. etlplus/file/vm.py +2 -1
  70. etlplus/file/wks.py +2 -1
  71. etlplus/file/xls.py +1 -1
  72. etlplus/file/xlsm.py +2 -2
  73. etlplus/file/xpt.py +2 -1
  74. etlplus/file/zsav.py +2 -1
  75. etlplus/ops/README.md +10 -9
  76. etlplus/ops/__init__.py +1 -0
  77. etlplus/ops/enums.py +173 -0
  78. etlplus/ops/extract.py +209 -22
  79. etlplus/ops/load.py +140 -34
  80. etlplus/ops/run.py +88 -103
  81. etlplus/ops/transform.py +46 -27
  82. etlplus/ops/types.py +147 -0
  83. etlplus/ops/utils.py +5 -5
  84. etlplus/ops/validate.py +13 -13
  85. etlplus/templates/README.md +11 -9
  86. etlplus/types.py +5 -102
  87. etlplus/workflow/README.md +0 -24
  88. etlplus/workflow/__init__.py +2 -4
  89. etlplus/workflow/dag.py +23 -1
  90. etlplus/workflow/jobs.py +15 -28
  91. etlplus/workflow/profile.py +4 -2
  92. {etlplus-0.16.0.dist-info → etlplus-0.16.7.dist-info}/METADATA +32 -28
  93. etlplus-0.16.7.dist-info/RECORD +143 -0
  94. etlplus-0.16.0.dist-info/RECORD +0 -141
  95. {etlplus-0.16.0.dist-info → etlplus-0.16.7.dist-info}/WHEEL +0 -0
  96. {etlplus-0.16.0.dist-info → etlplus-0.16.7.dist-info}/entry_points.txt +0 -0
  97. {etlplus-0.16.0.dist-info → etlplus-0.16.7.dist-info}/licenses/LICENSE +0 -0
  98. {etlplus-0.16.0.dist-info → etlplus-0.16.7.dist-info}/top_level.txt +0 -0
etlplus/cli/commands.py CHANGED
@@ -62,6 +62,16 @@ __all__ = ['app']
  # SECTION: TYPE ALIASES ==================================================== #


+ ConfigOption = Annotated[
+     str,
+     typer.Option(
+         ...,
+         '--config',
+         metavar='PATH',
+         help='Path to YAML-formatted configuration file.',
+     ),
+ ]
+
  JobOption = Annotated[
      str | None,
      typer.Option(
@@ -97,16 +107,6 @@ OutputOption = Annotated[
      ),
  ]

- PipelineConfigOption = Annotated[
-     str,
-     typer.Option(
-         ...,
-         '--config',
-         metavar='PATH',
-         help='Path to pipeline YAML configuration file.',
-     ),
- ]
-
  PipelineOption = Annotated[
      str | None,
      typer.Option(
@@ -407,7 +407,7 @@ def _root(
  @app.command('check')
  def check_cmd(
      ctx: typer.Context,
-     config: PipelineConfigOption,
+     config: ConfigOption,
      jobs: JobsOption = False,
      pipelines: PipelinesOption = False,
      sources: SourcesOption = False,
@@ -422,20 +422,20 @@ def check_cmd(
      ----------
      ctx : typer.Context
          The Typer context.
-     config : PipelineConfigOption
+     config : ConfigOption
          Path to pipeline YAML configuration file.
-     jobs : bool, optional
+     jobs : JobsOption, optional
          List available job names and exit. Default is ``False``.
-     pipelines : bool, optional
+     pipelines : PipelinesOption, optional
          List ETL pipelines. Default is ``False``.
-     sources : bool, optional
+     sources : SourcesOption, optional
          List data sources. Default is ``False``.
-     summary : bool, optional
+     summary : SummaryOption, optional
          Show pipeline summary (name, version, sources, targets, jobs). Default
          is ``False``.
-     targets : bool, optional
+     targets : TargetsOption, optional
          List data targets. Default is ``False``.
-     transforms : bool, optional
+     transforms : TransformsOption, optional
          List data transforms. Default is ``False``.

      Returns
@@ -725,7 +725,7 @@ def render_cmd(
  @app.command('run')
  def run_cmd(
      ctx: typer.Context,
-     config: PipelineConfigOption,
+     config: ConfigOption,
      job: JobOption = None,
      pipeline: PipelineOption = None,
  ) -> int:
@@ -736,11 +736,11 @@ def run_cmd(
      ----------
      ctx : typer.Context
          The Typer context.
-     config : PipelineConfigOption
+     config : ConfigOption
          Path to pipeline YAML configuration file.
-     job : str | None, optional
+     job : JobOption, optional
          Name of the job to run. Default is ``None``.
-     pipeline : str | None, optional
+     pipeline : PipelineOption, optional
          Name of the pipeline to run. Default is ``None``.

      Returns
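The `ConfigOption` rename above relies on Typer's reusable `Annotated` option aliases. Below is a minimal, self-contained sketch of that pattern, assuming Typer is installed: the alias body mirrors the diff, while the surrounding app and command are illustrative scaffolding rather than etlplus code.

```python
# Sketch of the reusable Annotated-option pattern applied in commands.py.
# Only ConfigOption's body comes from the diff; the app is hypothetical.
from typing import Annotated

import typer

app = typer.Typer()

# One alias can now back --config on every command (check, run, ...).
ConfigOption = Annotated[
    str,
    typer.Option(
        ...,
        '--config',
        metavar='PATH',
        help='Path to YAML-formatted configuration file.',
    ),
]


@app.command('run')
def run_cmd(config: ConfigOption) -> None:
    # Typer parses --config PATH and injects it here as a plain str.
    typer.echo(f'Would load configuration from {config}')


if __name__ == '__main__':
    app()
```

Declaring the annotation once keeps `--config` consistent across `check` and `run` without repeating the option metadata in each signature.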
etlplus/cli/handlers.py CHANGED
@@ -14,6 +14,7 @@ from typing import Any
  from typing import Literal
  from typing import cast

+ from .. import Config
  from ..database import load_table_spec
  from ..database import render_tables
  from ..file import File
@@ -23,11 +24,9 @@ from ..ops import load
  from ..ops import run
  from ..ops import transform
  from ..ops import validate
- from ..ops.validate import FieldRules
+ from ..ops.validate import FieldRulesDict
  from ..types import JSONData
  from ..types import TemplateKey
- from ..workflow import PipelineConfig
- from ..workflow import load_pipeline_config
  from . import io as cli_io

  # SECTION: EXPORTS ========================================================== #
@@ -73,14 +72,14 @@ def _collect_table_specs(
          specs.append(dict(load_table_spec(Path(spec_path))))

      if config_path:
-         cfg = load_pipeline_config(config_path, substitute=True)
+         cfg = Config.from_yaml(config_path, substitute=True)
          specs.extend(getattr(cfg, 'table_schemas', []))

      return specs


  def _check_sections(
-     cfg: PipelineConfig,
+     cfg: Config,
      *,
      jobs: bool,
      pipelines: bool,
@@ -93,7 +92,7 @@

      Parameters
      ----------
-     cfg : PipelineConfig
+     cfg : Config
          The loaded pipeline configuration.
      jobs : bool
          Whether to include job metadata.
@@ -133,14 +132,14 @@


  def _pipeline_summary(
-     cfg: PipelineConfig,
+     cfg: Config,
  ) -> dict[str, Any]:
      """
      Return a human-friendly snapshot of a pipeline config.

      Parameters
      ----------
-     cfg : PipelineConfig
+     cfg : Config
          The loaded pipeline configuration.

      Returns
@@ -229,7 +228,7 @@ def check_handler(
          Zero on success.

      """
-     cfg = load_pipeline_config(config, substitute=substitute)
+     cfg = Config.from_yaml(config, substitute=substitute)
      if summary:
          cli_io.emit_json(_pipeline_summary(cfg), pretty=True)
          return 0
@@ -514,7 +513,7 @@ def run_handler(
      int
          Zero on success.
      """
-     cfg = load_pipeline_config(config, substitute=True)
+     cfg = Config.from_yaml(config, substitute=True)

      job_name = job or pipeline
      if job_name:
@@ -662,7 +661,7 @@ def validate_handler(
      if not isinstance(rules_payload, dict):
          raise ValueError('rules must resolve to a mapping of field rules')

-     field_rules = cast(Mapping[str, FieldRules], rules_payload)
+     field_rules = cast(Mapping[str, FieldRulesDict], rules_payload)
      result = validate(payload, field_rules)

      if target and target != '-':
@@ -671,11 +670,11 @@
              cli_io.write_json_output(
                  validated_data,
                  target,
-                 success_message='Validation result saved to',
+                 success_message='ValidationDict result saved to',
              )
          else:
              print(
-                 f'Validation failed, no data to save for {target}',
+                 f'ValidationDict failed, no data to save for {target}',
                  file=sys.stderr,
              )
      else:
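Taken together, the handler changes imply a one-line migration at any external call site. A hedged sketch, assuming etlplus 0.16.7 is installed, that `Config` is re-exported at the package root (as the `from .. import Config` line suggests), and that `pipeline.yml` is a placeholder path:

```python
# Migration sketch for the load_pipeline_config -> Config.from_yaml change.
# 'pipeline.yml' is a placeholder; substitute=True matches the handler calls.

# etlplus 0.16.0:
# from etlplus.workflow import load_pipeline_config
# cfg = load_pipeline_config('pipeline.yml', substitute=True)

# etlplus 0.16.7:
from etlplus import Config

cfg = Config.from_yaml('pipeline.yml', substitute=True)
print(cfg)
```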
etlplus/cli/main.py CHANGED
@@ -4,7 +4,7 @@
  Entry point helpers for the Typer-powered ``etlplus`` CLI.

  This module exposes :func:`main` for the console script as well as
- :func:`create_parser` for callers that still need an ``argparse`` parser.
+ :func:`create_parser`, which is kept for compatibility but now raises.
  """

  from __future__ import annotations
etlplus/{workflow/pipeline.py → config.py} RENAMED
@@ -1,7 +1,7 @@
  """
- :mod:`etlplus.workflow.pipeline` module.
+ :mod:`etlplus.config` module.

- Pipeline configuration model and helpers for job orchestration.
+ Configuration model and helpers for job pipeline orchestration.

  Notes
  -----
@@ -24,46 +24,66 @@ from pathlib import Path
  from typing import Any
  from typing import Self

- from ..api import ApiConfig
- from ..connector import Connector
- from ..connector import parse_connector
- from ..file import File
- from ..file import FileFormat
- from ..types import StrAnyMap
- from ..utils import coerce_dict
- from ..utils import deep_substitute
- from ..utils import maybe_mapping
- from .jobs import JobConfig
- from .profile import ProfileConfig
+ from .api import ApiConfig
+ from .connector import Connector
+ from .connector import parse_connector
+ from .file import File
+ from .file import FileFormat
+ from .types import StrAnyMap
+ from .utils import coerce_dict
+ from .utils import deep_substitute
+ from .utils import maybe_mapping
+ from .workflow.jobs import JobConfig
+ from .workflow.profile import ProfileConfig

  # SECTION: EXPORTS ========================================================== #


  __all__ = [
      # Data Classes
-     'PipelineConfig',
-     # Functions
-     'load_pipeline_config',
+     'Config',
  ]


  # SECTION: INTERNAL FUNCTIONS =============================================== #


- def _collect_parsed[T](
+ def _build_connectors(
      raw: StrAnyMap,
+     *,
      key: str,
-     parser: Callable[[Any], T | None],
- ) -> list[T]:
+ ) -> list[Connector]:
      """
-     Collect parsed items from ``raw[key]`` using a tolerant parser.
+     Parse connector entries from a list under ``raw[key]``.

      Parameters
      ----------
      raw : StrAnyMap
          Raw pipeline mapping.
      key : str
-         Key pointing to a list-like payload.
+         Key pointing to connector entries (e.g., ``"sources"``).
+
+     Returns
+     -------
+     list[Connector]
+         Parsed connector instances.
+     """
+     return list(
+         _collect_parsed(raw.get(key, []) or [], _parse_connector_entry),
+     )
+
+
+ def _collect_parsed[T](
+     items: Any,
+     parser: Callable[[Any], T | None],
+ ) -> list[T]:
+     """
+     Collect parsed items from ``raw[key]`` using a tolerant parser.
+
+     Parameters
+     ----------
+     items : Any
+         List-like payload to parse.
      parser : Callable[[Any], T | None]
          Parser that returns an instance or ``None`` for invalid entries.

@@ -72,12 +92,12 @@ def _collect_parsed[T](
      list[T]
          Parsed items, excluding invalid entries.
      """
-     items: list[T] = []
-     for entry in raw.get(key, []) or []:
+     parsed_items: list[T] = []
+     for entry in items or []:
          parsed = parser(entry)
          if parsed is not None:
-             items.append(parsed)
-     return items
+             parsed_items.append(parsed)
+     return parsed_items


  def _parse_connector_entry(
@@ -104,71 +124,11 @@
      return None


- def _build_sources(
-     raw: StrAnyMap,
- ) -> list[Connector]:
-     """
-     Return a list of source connectors parsed from the mapping.
-
-     Parameters
-     ----------
-     raw : StrAnyMap
-         Raw pipeline mapping.
-
-     Returns
-     -------
-     list[Connector]
-         Parsed source connectors.
-     """
-     return list(
-         _collect_parsed(raw, 'sources', _parse_connector_entry),
-     )
-
-
- def _build_targets(
-     raw: StrAnyMap,
- ) -> list[Connector]:
-     """
-     Return a list of target connectors parsed from the mapping.
-
-     Parameters
-     ----------
-     raw : StrAnyMap
-         Raw pipeline mapping.
-
-     Returns
-     -------
-     list[Connector]
-         Parsed target connectors.
-     """
-     return list(
-         _collect_parsed(raw, 'targets', _parse_connector_entry),
-     )
-
-
- # SECTION: FUNCTIONS ======================================================== #
-
-
- def load_pipeline_config(
-     path: Path | str,
-     *,
-     substitute: bool = False,
-     env: Mapping[str, str] | None = None,
- ) -> PipelineConfig:
-     """
-     Load a pipeline YAML file into a ``PipelineConfig`` instance.
-
-     Delegates to ``PipelineConfig.from_yaml`` for construction and optional
-     variable substitution.
-     """
-     return PipelineConfig.from_yaml(path, substitute=substitute, env=env)
-
-
  # SECTION: DATA CLASSES ===================================================== #


  @dataclass(kw_only=True, slots=True)
- class PipelineConfig:
+ class Config:
      """
      Configuration for the data processing pipeline.

@@ -231,7 +191,7 @@
          env: Mapping[str, str] | None = None,
      ) -> Self:
          """
-         Parse a YAML file into a ``PipelineConfig`` instance.
+         Parse a YAML file into a ``Config`` instance.

          Parameters
          ----------
@@ -279,7 +239,7 @@
          raw: StrAnyMap,
      ) -> Self:
          """
-         Parse a mapping into a ``PipelineConfig`` instance.
+         Parse a mapping into a ``Config`` instance.

          Parameters
          ----------
@@ -311,17 +271,20 @@
          file_systems = coerce_dict(raw.get('file_systems'))

          # Sources
-         sources = _build_sources(raw)
+         sources = _build_connectors(raw, key='sources')

          # Validations/Transforms
          validations = coerce_dict(raw.get('validations'))
          transforms = coerce_dict(raw.get('transforms'))

          # Targets
-         targets = _build_targets(raw)
+         targets = _build_connectors(raw, key='targets')

          # Jobs
-         jobs = _collect_parsed(raw, 'jobs', JobConfig.from_obj)
+         jobs: list[JobConfig] = _collect_parsed(
+             raw.get('jobs', []) or [],
+             JobConfig.from_obj,
+         )

          # Table schemas (optional, tolerant pass-through structures).
          table_schemas: list[dict[str, Any]] = []
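The refactor narrows `_collect_parsed` to accept the list payload directly, leaving the `raw.get(key)` lookup to `_build_connectors`. A self-contained sketch of the tolerant-parser behavior (Python 3.12+ for the PEP 695 generic), with a hypothetical integer parser standing in for `_parse_connector_entry`:

```python
# Tolerant-parser pattern from config.py after the refactor.
# _collect_parsed matches the diff; _parse_int is a stand-in parser.
from collections.abc import Callable
from typing import Any


def _collect_parsed[T](
    items: Any,
    parser: Callable[[Any], T | None],
) -> list[T]:
    # Invalid entries parse to None and are skipped rather than raising.
    parsed_items: list[T] = []
    for entry in items or []:
        parsed = parser(entry)
        if parsed is not None:
            parsed_items.append(parsed)
    return parsed_items


def _parse_int(entry: Any) -> int | None:
    try:
        return int(entry)
    except (TypeError, ValueError):
        return None


print(_collect_parsed(['1', 'oops', 2, None], _parse_int))  # [1, 2]
```

Entries that fail to parse are dropped silently, which matches the "tolerant parser" contract described in the docstrings.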
etlplus/connector/__init__.py CHANGED
@@ -7,15 +7,15 @@ Connector configuration types and enums.
  from __future__ import annotations

  from .api import ConnectorApi
- from .api import ConnectorApiConfigMap
+ from .api import ConnectorApiConfigDict
  from .connector import Connector
  from .core import ConnectorBase
  from .core import ConnectorProtocol
  from .database import ConnectorDb
- from .database import ConnectorDbConfigMap
+ from .database import ConnectorDbConfigDict
  from .enums import DataConnectorType
  from .file import ConnectorFile
- from .file import ConnectorFileConfigMap
+ from .file import ConnectorFileConfigDict
  from .types import ConnectorType
  from .utils import parse_connector

@@ -37,7 +37,7 @@ __all__ = [
      'ConnectorProtocol',
      'ConnectorType',
      # Typed Dicts
-     'ConnectorApiConfigMap',
-     'ConnectorDbConfigMap',
-     'ConnectorFileConfigMap',
+     'ConnectorApiConfigDict',
+     'ConnectorDbConfigDict',
+     'ConnectorFileConfigDict',
  ]
etlplus/connector/api.py CHANGED
@@ -22,9 +22,9 @@ from typing import TypedDict
  from typing import overload

  from ..api import PaginationConfig
- from ..api import PaginationConfigMap
+ from ..api import PaginationConfigDict
  from ..api import RateLimitConfig
- from ..api import RateLimitConfigMap
+ from ..api import RateLimitConfigDict
  from ..types import StrAnyMap
  from ..types import StrStrMap
  from ..utils import cast_str_dict
@@ -39,14 +39,14 @@ from .types import ConnectorType

  __all__ = [
      'ConnectorApi',
-     'ConnectorApiConfigMap',
+     'ConnectorApiConfigDict',
  ]


  # SECTION: TYPED DICTS ====================================================== #


- class ConnectorApiConfigMap(TypedDict, total=False):
+ class ConnectorApiConfigDict(TypedDict, total=False):
      """
      Shape accepted by :meth:`ConnectorApi.from_obj` (all keys optional).

@@ -61,8 +61,8 @@ class ConnectorApiConfigMap(TypedDict, total=False):
      method: str
      headers: StrStrMap
      query_params: StrAnyMap
-     pagination: PaginationConfigMap
-     rate_limit: RateLimitConfigMap
+     pagination: PaginationConfigDict
+     rate_limit: RateLimitConfigDict
      api: str
      endpoint: str

@@ -121,7 +121,7 @@ class ConnectorApi(ConnectorBase):

      @classmethod
      @overload
-     def from_obj(cls, obj: ConnectorApiConfigMap) -> Self: ...
+     def from_obj(cls, obj: ConnectorApiConfigDict) -> Self: ...

      @classmethod
      @overload
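Callers that annotate config payloads with these TypedDicts only need the rename. A hedged usage sketch, assuming etlplus 0.16.7 is installed: the keys come from the `ConnectorApiConfigDict` body shown above, the values are invented, and `from_obj`'s runtime validation is not shown in this diff.

```python
# Renamed TypedDict in use; keys per the ConnectorApiConfigDict shown above.
# Values are illustrative only.
from etlplus.connector import ConnectorApi, ConnectorApiConfigDict

cfg: ConnectorApiConfigDict = {
    'method': 'GET',
    'headers': {'Accept': 'application/json'},
    'query_params': {'page_size': 100},
    'endpoint': 'https://api.example.com/v1/items',
}
connector = ConnectorApi.from_obj(cfg)  # overload accepts the TypedDict
print(connector)
```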
etlplus/connector/database.py CHANGED
@@ -29,14 +29,14 @@ from .types import ConnectorType

  __all__ = [
      'ConnectorDb',
-     'ConnectorDbConfigMap',
+     'ConnectorDbConfigDict',
  ]


  # SECTION: TYPED DICTS ====================================================== #


- class ConnectorDbConfigMap(TypedDict, total=False):
+ class ConnectorDbConfigDict(TypedDict, total=False):
      """
      Shape accepted by :meth:`ConnectorDb.from_obj` (all keys optional).

@@ -87,7 +87,7 @@ class ConnectorDb(ConnectorBase):

      @classmethod
      @overload
-     def from_obj(cls, obj: ConnectorDbConfigMap) -> Self: ...
+     def from_obj(cls, obj: ConnectorDbConfigDict) -> Self: ...

      @classmethod
      @overload
etlplus/connector/file.py CHANGED
@@ -32,14 +32,14 @@ from .types import ConnectorType

  __all__ = [
      'ConnectorFile',
-     'ConnectorFileConfigMap',
+     'ConnectorFileConfigDict',
  ]


  # SECTION: TYPED DICTS ====================================================== #


- class ConnectorFileConfigMap(TypedDict, total=False):
+ class ConnectorFileConfigDict(TypedDict, total=False):
      """
      Shape accepted by :meth:`ConnectorFile.from_obj` (all keys optional).

@@ -86,7 +86,7 @@ class ConnectorFile(ConnectorBase):

      @classmethod
      @overload
-     def from_obj(cls, obj: ConnectorFileConfigMap) -> Self: ...
+     def from_obj(cls, obj: ConnectorFileConfigDict) -> Self: ...

      @classmethod
      @overload
etlplus/connector/types.py CHANGED
@@ -14,8 +14,8 @@ Examples
  >>>     "type": "database",
  >>>     "connection_string": "postgresql://user:pass@localhost/db",
  >>> }
- >>> from etlplus.api import RetryPolicy
- >>> rp: RetryPolicy = {"max_attempts": 3, "backoff": 0.5}
+ >>> from etlplus.api import RetryPolicyDict
+ >>> rp: RetryPolicyDict = {"max_attempts": 3, "backoff": 0.5}
  """

  from __future__ import annotations
etlplus/database/README.md CHANGED
@@ -18,8 +18,8 @@ Back to project overview: see the top-level [README](../../README.md).

  ## Database Engine and Connections

- - Manage connections to supported databases
- - Configure engines for different backends
+ - Build SQLAlchemy engines with `make_engine`
+ - Load connection strings from pipeline configs

  ## Schema and DDL Helpers

@@ -31,14 +31,14 @@ Back to project overview: see the top-level [README](../../README.md).
  - Map rows to Python objects
  - Simple CRUD helpers for tabular data

- ## Example: Creating a Table
+ ## Example: Rendering DDL From a Spec

  ```python
- from etlplus.database import Schema, Engine
+ from etlplus.database import load_table_spec, render_table_sql

- engine = Engine.connect("sqlite:///example.db")
- schema = Schema.from_dict({"name": "users", "columns": [ ... ]})
- engine.create_table(schema)
+ spec = load_table_spec("schemas/users.yml")
+ sql = render_table_sql(spec, template="ddl")
+ print(sql)
  ```

  ## See Also