etlplus 0.4.7__py3-none-any.whl → 0.7.0__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
etlplus/cli/app.py CHANGED
@@ -25,6 +25,7 @@ Subcommands
  - ``validate``: validate data against rules
  - ``transform``: transform records
  - ``load``: load data to files, databases, or REST APIs
+ - ``render``: render SQL DDL from table schema specs
 
  Notes
  -----
@@ -56,13 +57,14 @@ from .. import __version__
  from ..enums import DataConnectorType
  from ..enums import FileFormat
  from ..utils import json_type
- from .handlers import cmd_extract
- from .handlers import cmd_list
- from .handlers import cmd_load
- from .handlers import cmd_pipeline
- from .handlers import cmd_run
- from .handlers import cmd_transform
- from .handlers import cmd_validate
+ from .handlers import check_handler
+ from .handlers import extract_handler
+ from .handlers import load_handler
+ from .handlers import pipeline_handler
+ from .handlers import render_handler
+ from .handlers import run_handler
+ from .handlers import transform_handler
+ from .handlers import validate_handler
 
  # SECTION: EXPORTS ========================================================== #
 
@@ -125,62 +127,90 @@ PROJECT_URL: Final[str] = 'https://github.com/Dagitali/ETLPlus'
  # SECTION: TYPE ALIASES ==================================================== #
 
 
- SourceInputArg = Annotated[
+ OperationsJSONOption = Annotated[
      str,
-     typer.Argument(
-         ...,
-         metavar='SOURCE',
-         help=(
-             'Extract from SOURCE. Use --from/--source-type to override the '
-             'inferred connector when needed.'
-         ),
+     typer.Option(
+         '--operations',
+         help='Transformation operations as JSON string.',
      ),
  ]
 
- StreamingSourceArg = Annotated[
+ PipelineConfigOption = Annotated[
      str,
-     typer.Argument(
+     typer.Option(
          ...,
-         metavar='SOURCE',
-         help=(
-             'Data source to transform or validate (path, JSON payload, or '
-             '- for stdin).'
-         ),
+         '--config',
+         metavar='PATH',
+         help='Path to pipeline YAML configuration file.',
      ),
  ]
 
- TargetInputArg = Annotated[
-     str,
-     typer.Argument(
-         ...,
-         metavar='TARGET',
-         help=(
-             'Load JSON data from stdin into TARGET. Use --to/--target-type '
-             'to override connector inference when needed. Source data must '
-             'be piped into stdin.'
-         ),
+ RenderConfigOption = Annotated[
+     str | None,
+     typer.Option(
+         '--config',
+         metavar='PATH',
+         help='Pipeline YAML that includes table_schemas for rendering.',
+         show_default=False,
      ),
  ]
 
- SourceOverrideOption = Annotated[
+ RenderOutputOption = Annotated[
      str | None,
      typer.Option(
-         '--source-type',
-         metavar='CONNECTOR',
-         show_default=False,
-         rich_help_panel='I/O overrides',
-         help='Override the inferred source type (file, database, api).',
+         '--output',
+         '-o',
+         metavar='PATH',
+         help='Write rendered SQL to PATH (default: stdout).',
      ),
  ]
 
- TargetOverrideOption = Annotated[
+ RenderSpecOption = Annotated[
      str | None,
      typer.Option(
-         '--target-type',
-         metavar='CONNECTOR',
+         '--spec',
+         metavar='PATH',
+         help='Standalone table spec file (.yml/.yaml/.json).',
          show_default=False,
-         rich_help_panel='I/O overrides',
-         help='Override the inferred target type (file, database, api).',
+     ),
+ ]
+
+ RenderTableOption = Annotated[
+     str | None,
+     typer.Option(
+         '--table',
+         metavar='NAME',
+         help='Filter to a single table name from table_schemas.',
+     ),
+ ]
+
+ RenderTemplateOption = Annotated[
+     str,
+     typer.Option(
+         '--template',
+         '-t',
+         metavar='KEY|PATH',
+         help='Template key (ddl/view) or path to a Jinja template file.',
+         show_default=True,
+     ),
+ ]
+
+ RenderTemplatePathOption = Annotated[
+     str | None,
+     typer.Option(
+         '--template-path',
+         metavar='PATH',
+         help=(
+             'Explicit path to a Jinja template file (overrides template key).'
+         ),
+     ),
+ ]
+
+ RulesJSONOption = Annotated[
+     str,
+     typer.Option(
+         '--rules',
+         help='Validation rules as JSON string.',
      ),
  ]
 
@@ -198,6 +228,29 @@ SourceFormatOption = Annotated[
      ),
  ]
 
+ SourceInputArg = Annotated[
+     str,
+     typer.Argument(
+         ...,
+         metavar='SOURCE',
+         help=(
+             'Extract from SOURCE. Use --from/--source-type to override the '
+             'inferred connector when needed.'
+         ),
+     ),
+ ]
+
+ SourceOverrideOption = Annotated[
+     str | None,
+     typer.Option(
+         '--source-type',
+         metavar='CONNECTOR',
+         show_default=False,
+         rich_help_panel='I/O overrides',
+         help='Override the inferred source type (file, database, api).',
+     ),
+ ]
+
  StdinFormatOption = Annotated[
      str | None,
      typer.Option(
@@ -209,6 +262,18 @@ StdinFormatOption = Annotated[
      ),
  ]
 
+ StreamingSourceArg = Annotated[
+     str,
+     typer.Argument(
+         ...,
+         metavar='SOURCE',
+         help=(
+             'Data source to transform or validate (path, JSON payload, or '
+             '- for stdin).'
+         ),
+     ),
+ ]
+
  TargetFormatOption = Annotated[
      str | None,
      typer.Option(
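Note on the type-alias hunks above and below: the churn is mostly an alphabetical reordering of existing aliases (SourceInputArg, StreamingSourceArg, TargetInputArg, and the override options move rather than change), plus the new Render*Option aliases backing the render command. Every alias uses Typer's Annotated pattern, where the alias bundles a parameter type with its typer.Option(...) or typer.Argument(...) metadata so command signatures stay short and the same flag definition can be shared across commands. A minimal, self-contained sketch of the pattern (illustrative only, not ETLPlus code):

    from typing import Annotated

    import typer

    # Reusable option alias, in the style of RenderTableOption above.
    TableOption = Annotated[
        str | None,
        typer.Option('--table', metavar='NAME', help='Filter to one table.'),
    ]

    app = typer.Typer()


    @app.command()
    def render(table: TableOption = None) -> None:
        # The alias supplies the flag name, metavar, and help text;
        # the default value stays in the command signature.
        typer.echo(f'table={table!r}')


    if __name__ == '__main__':
        app()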
@@ -223,19 +288,27 @@ TargetFormatOption = Annotated[
      ),
  ]
 
- OperationsJSONOption = Annotated[
+ TargetInputArg = Annotated[
      str,
-     typer.Option(
-         '--operations',
-         help='Transformation operations as JSON string.',
+     typer.Argument(
+         ...,
+         metavar='TARGET',
+         help=(
+             'Load JSON data from stdin into TARGET. Use --to/--target-type '
+             'to override connector inference when needed. Source data must '
+             'be piped into stdin.'
+         ),
      ),
  ]
 
- RulesJSONOption = Annotated[
-     str,
+ TargetOverrideOption = Annotated[
+     str | None,
      typer.Option(
-         '--rules',
-         help='Validation rules as JSON string.',
+         '--target-type',
+         metavar='CONNECTOR',
+         show_default=False,
+         rich_help_panel='I/O overrides',
+         help='Override the inferred target type (file, database, api).',
      ),
  ]
 
@@ -248,16 +321,6 @@ TargetPathOption = Annotated[
      ),
  ]
 
- PipelineConfigOption = Annotated[
-     str,
-     typer.Option(
-         ...,
-         '--config',
-         metavar='PATH',
-         help='Path to pipeline YAML configuration file.',
-     ),
- ]
-
 
  # SECTION: DATA CLASSES ===================================================== #
 
@@ -682,6 +745,83 @@ def _root(
          raise typer.Exit(0)
 
 
+ @app.command('check')
+ def check_cmd(
+     ctx: typer.Context,
+     config: PipelineConfigOption,
+     jobs: bool = typer.Option(
+         False,
+         '--jobs',
+         help='List available job names and exit',
+     ),
+     pipelines: bool = typer.Option(
+         False,
+         '--pipelines',
+         help='List ETL pipelines',
+     ),
+     sources: bool = typer.Option(
+         False,
+         '--sources',
+         help='List data sources',
+     ),
+     summary: bool = typer.Option(
+         False,
+         '--summary',
+         help='Show pipeline summary (name, version, sources, targets, jobs)',
+     ),
+     targets: bool = typer.Option(
+         False,
+         '--targets',
+         help='List data targets',
+     ),
+     transforms: bool = typer.Option(
+         False,
+         '--transforms',
+         help='List data transforms',
+     ),
+ ) -> int:
+     """
+     Print ETL entities from a pipeline YAML configuration.
+
+     Parameters
+     ----------
+     ctx : typer.Context
+         Typer execution context provided to the command.
+     config : PipelineConfigOption
+         Path to pipeline YAML configuration file.
+     jobs : bool, optional
+         If True, list available job names and exit.
+     pipelines : bool, optional
+         If True, list ETL pipelines.
+     sources : bool, optional
+         If True, list data sources.
+     summary : bool, optional
+         If True, show pipeline summary (name, version, sources, targets, jobs).
+     targets : bool, optional
+         If True, list data targets.
+     transforms : bool, optional
+         If True, list data transforms.
+
+     Returns
+     -------
+     int
+         Zero on success.
+     """
+     state = _ensure_state(ctx)
+     ns = _stateful_namespace(
+         state,
+         command='check',
+         config=config,
+         summary=summary,
+         pipelines=pipelines,
+         jobs=jobs,
+         sources=sources,
+         targets=targets,
+         transforms=transforms,
+     )
+     return int(check_handler(ns))
+
+
  @app.command('extract')
  def extract_cmd(
      ctx: typer.Context,
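The check command above replaces the list command that is removed in the next hunk; it keeps the --jobs, --pipelines, --sources, --targets, and --transforms switches, adds --summary, and dispatches to the new check_handler. A hedged invocation sketch using Typer's test runner; the import of app is an assumption based on this file's path, and pipeline.yml is a placeholder:

    from typer.testing import CliRunner

    from etlplus.cli.app import app  # assumed: `app` is this module's Typer instance

    runner = CliRunner()

    # Roughly equivalent to: etlplus check --config pipeline.yml --summary --jobs
    result = runner.invoke(app, ['check', '--config', 'pipeline.yml', '--summary', '--jobs'])
    print(result.exit_code)
    print(result.output)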
@@ -765,68 +905,7 @@ def extract_cmd(
          source=resolved_source,
          **format_kwargs,
      )
-     return int(cmd_extract(ns))
-
-
- @app.command('list')
- def list_cmd(
-     ctx: typer.Context,
-     config: PipelineConfigOption,
-     jobs: bool = typer.Option(
-         False,
-         '--jobs',
-         help='List available job names and exit',
-     ),
-     pipelines: bool = typer.Option(
-         False,
-         '--pipelines',
-         help='List ETL pipelines',
-     ),
-     sources: bool = typer.Option(False, '--sources', help='List data sources'),
-     targets: bool = typer.Option(False, '--targets', help='List data targets'),
-     transforms: bool = typer.Option(
-         False,
-         '--transforms',
-         help='List data transforms',
-     ),
- ) -> int:
-     """
-     Print ETL entities from a pipeline YAML configuration.
-
-     Parameters
-     ----------
-     ctx : typer.Context
-         Typer execution context provided to the command.
-     config : PipelineConfigOption
-         Path to pipeline YAML configuration file.
-     jobs : bool, optional
-         If True, list available job names and exit.
-     pipelines : bool, optional
-         If True, list ETL pipelines.
-     sources : bool, optional
-         If True, list data sources.
-     targets : bool, optional
-         If True, list data targets.
-     transforms : bool, optional
-         If True, list data transforms.
-
-     Returns
-     -------
-     int
-         Zero on success.
-     """
-     state = _ensure_state(ctx)
-     ns = _stateful_namespace(
-         state,
-         command='list',
-         config=config,
-         pipelines=pipelines,
-         jobs=jobs,
-         sources=sources,
-         targets=targets,
-         transforms=transforms,
-     )
-     return int(cmd_list(ns))
+     return int(extract_handler(ns))
 
 
  @app.command('load')
@@ -928,7 +1007,7 @@ def load_cmd(
          target=resolved_target,
          **format_kwargs,
      )
-     return int(cmd_load(ns))
+     return int(load_handler(ns))
 
 
  @app.command('pipeline')
@@ -953,7 +1032,7 @@ def pipeline_cmd(
      ),
  ) -> int:
      """
-     Inspect or run a pipeline YAML configuration.
+     Deprecated wrapper to inspect or run a pipeline YAML configuration.
 
      Parameters
      ----------
@@ -982,7 +1061,56 @@ def pipeline_cmd(
          list=jobs,
          run=run_target,
      )
-     return int(cmd_pipeline(ns))
+     return int(pipeline_handler(ns))
+
+
+ @app.command('render')
+ def render_cmd(
+     ctx: typer.Context,
+     config: RenderConfigOption = None,
+     spec: RenderSpecOption = None,
+     table: RenderTableOption = None,
+     template: RenderTemplateOption = 'ddl',
+     template_path: RenderTemplatePathOption = None,
+     output: RenderOutputOption = None,
+ ) -> int:
+     """
+     Render SQL DDL from table schemas defined in YAML/JSON configs.
+
+     Parameters
+     ----------
+     ctx : typer.Context
+         Typer execution context provided to the command.
+     config : RenderConfigOption, optional
+         Pipeline YAML containing ``table_schemas`` entries.
+     spec : RenderSpecOption, optional
+         Standalone table spec file (.yml/.yaml/.json).
+     table : RenderTableOption, optional
+         Filter to a single table name within the available specs.
+     template : RenderTemplateOption, optional
+         Built-in template key or template file path.
+     template_path : RenderTemplatePathOption, optional
+         Explicit template file path to render with.
+     output : RenderOutputOption, optional
+         Path to write SQL to (stdout when omitted).
+
+     Returns
+     -------
+     int
+         Zero on success.
+     """
+     state = _ensure_state(ctx)
+     ns = _stateful_namespace(
+         state,
+         command='render',
+         config=config,
+         spec=spec,
+         table=table,
+         template=template,
+         template_path=template_path,
+         output=output,
+     )
+     return int(render_handler(ns))
 
 
  @app.command('run')
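The render command is the CLI surface for render_handler: --config points at a pipeline YAML whose table_schemas entries describe the tables, --spec accepts a standalone .yml/.yaml/.json spec instead, --table narrows rendering to one table, --template/-t selects a built-in key (ddl or view) or a template path, --template-path forces an explicit Jinja file, and --output/-o writes the SQL to a file instead of stdout. A hedged sketch of one invocation, reusing the assumed app import from the check sketch and placeholder file names:

    from typer.testing import CliRunner

    from etlplus.cli.app import app  # assumed import, as in the check sketch

    runner = CliRunner()

    # Roughly equivalent to:
    #   etlplus render --spec tables.yml --table users -t ddl -o users.sql
    result = runner.invoke(
        app,
        [
            'render',
            '--spec', 'tables.yml',
            '--table', 'users',
            '--template', 'ddl',
            '--output', 'users.sql',
        ],
    )
    print(result.exit_code)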
@@ -1029,7 +1157,7 @@ def run_cmd(
          job=job,
          pipeline=pipeline,
      )
-     return int(cmd_run(ns))
+     return int(run_handler(ns))
 
 
  @app.command('transform')
@@ -1166,7 +1294,7 @@ def transform_cmd(
          target_format=target_format_kwargs['format'],
          **target_format_kwargs,
      )
-     return int(cmd_transform(ns))
+     return int(transform_handler(ns))
 
 
  @app.command('validate')
@@ -1236,4 +1364,4 @@ def validate_cmd(
          source_format=source_format,
          **source_format_kwargs,
      )
-     return int(cmd_validate(ns))
+     return int(validate_handler(ns))
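Across every command body in this diff the wiring is identical: build a namespace with _stateful_namespace, hand it to the renamed *_handler function, and return int(...) of its result. The handlers themselves live in etlplus/cli/handlers.py and are not shown here, so the stub below is only a sketch of the implied contract; the concrete namespace type (argparse.Namespace, SimpleNamespace, or something custom) is an assumption:

    from types import SimpleNamespace


    def example_handler(ns: SimpleNamespace) -> int:
        # Hypothetical stand-in for check_handler, render_handler, etc.
        # Each real handler receives the namespace built by _stateful_namespace
        # and returns a status that the command wrapper coerces with int();
        # 0 is the success path.
        print(f"command={ns.command!r}, config={getattr(ns, 'config', None)!r}")
        return 0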