etlplus 0.5.3__py3-none-any.whl → 0.5.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
etlplus/cli/app.py CHANGED
@@ -57,14 +57,14 @@ from .. import __version__
 from ..enums import DataConnectorType
 from ..enums import FileFormat
 from ..utils import json_type
-from .handlers import cmd_extract
-from .handlers import cmd_list
-from .handlers import cmd_load
-from .handlers import cmd_pipeline
-from .handlers import cmd_render
-from .handlers import cmd_run
-from .handlers import cmd_transform
-from .handlers import cmd_validate
+from .handlers import check_handler
+from .handlers import extract_handler
+from .handlers import load_handler
+from .handlers import pipeline_handler
+from .handlers import render_handler
+from .handlers import run_handler
+from .handlers import transform_handler
+from .handlers import validate_handler
 
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -127,109 +127,82 @@ PROJECT_URL: Final[str] = 'https://github.com/Dagitali/ETLPlus'
 # SECTION: TYPE ALIASES ==================================================== #
 
 
-SourceInputArg = Annotated[
-    str,
-    typer.Argument(
-        ...,
-        metavar='SOURCE',
-        help=(
-            'Extract from SOURCE. Use --from/--source-type to override the '
-            'inferred connector when needed.'
-        ),
-    ),
-]
-
-StreamingSourceArg = Annotated[
+OperationsJSONOption = Annotated[
     str,
-    typer.Argument(
-        ...,
-        metavar='SOURCE',
-        help=(
-            'Data source to transform or validate (path, JSON payload, or '
-            '- for stdin).'
-        ),
+    typer.Option(
+        '--operations',
+        help='Transformation operations as JSON string.',
    ),
 ]
 
-TargetInputArg = Annotated[
+PipelineConfigOption = Annotated[
     str,
-    typer.Argument(
+    typer.Option(
         ...,
-        metavar='TARGET',
-        help=(
-            'Load JSON data from stdin into TARGET. Use --to/--target-type '
-            'to override connector inference when needed. Source data must '
-            'be piped into stdin.'
-        ),
+        '--config',
+        metavar='PATH',
+        help='Path to pipeline YAML configuration file.',
     ),
 ]
 
-SourceOverrideOption = Annotated[
+RenderConfigOption = Annotated[
     str | None,
     typer.Option(
-        '--source-type',
-        metavar='CONNECTOR',
+        '--config',
+        metavar='PATH',
+        help='Pipeline YAML that includes table_schemas for rendering.',
         show_default=False,
-        rich_help_panel='I/O overrides',
-        help='Override the inferred source type (file, database, api).',
     ),
 ]
 
-TargetOverrideOption = Annotated[
+RenderOutputOption = Annotated[
     str | None,
     typer.Option(
-        '--target-type',
-        metavar='CONNECTOR',
-        show_default=False,
-        rich_help_panel='I/O overrides',
-        help='Override the inferred target type (file, database, api).',
+        '--output',
+        '-o',
+        metavar='PATH',
+        help='Write rendered SQL to PATH (default: stdout).',
    ),
 ]
 
-SourceFormatOption = Annotated[
+RenderSpecOption = Annotated[
     str | None,
     typer.Option(
-        '--source-format',
-        metavar='FORMAT',
+        '--spec',
+        metavar='PATH',
+        help='Standalone table spec file (.yml/.yaml/.json).',
         show_default=False,
-        rich_help_panel='Format overrides',
-        help=(
-            'Input payload format when SOURCE is - or an inline payload. '
-            'File sources infer format from the extension.'
-        ),
    ),
 ]
 
-StdinFormatOption = Annotated[
+RenderTableOption = Annotated[
     str | None,
     typer.Option(
-        '--source-format',
-        metavar='FORMAT',
-        show_default=False,
-        rich_help_panel='Format overrides',
-        help='Input payload format when reading from stdin (default: json).',
+        '--table',
+        metavar='NAME',
+        help='Filter to a single table name from table_schemas.',
    ),
 ]
 
-TargetFormatOption = Annotated[
-    str | None,
+RenderTemplateOption = Annotated[
+    str,
     typer.Option(
-        '--target-format',
-        metavar='FORMAT',
-        show_default=False,
-        rich_help_panel='Format overrides',
-        help=(
-            'Payload format when TARGET is - or a non-file connector. File '
-            'targets infer format from the extension.'
-        ),
+        '--template',
+        '-t',
+        metavar='KEY|PATH',
+        help='Template key (ddl/view) or path to a Jinja template file.',
+        show_default=True,
    ),
 ]
 
-OperationsJSONOption = Annotated[
-    str,
+RenderTemplatePathOption = Annotated[
+    str | None,
     typer.Option(
-        '--operations',
-        help='Transformation operations as JSON string.',
+        '--template-path',
+        metavar='PATH',
+        help=(
+            'Explicit path to a Jinja template file (overrides template key).'
+        ),
    ),
 ]
 
@@ -241,83 +214,110 @@ RulesJSONOption = Annotated[
     ),
 ]
 
-TargetPathOption = Annotated[
+SourceFormatOption = Annotated[
     str | None,
     typer.Option(
-        '--target',
-        metavar='PATH',
-        help='Target file for transformed or validated output (- for stdout).',
+        '--source-format',
+        metavar='FORMAT',
+        show_default=False,
+        rich_help_panel='Format overrides',
+        help=(
+            'Input payload format when SOURCE is - or an inline payload. '
+            'File sources infer format from the extension.'
+        ),
    ),
 ]
 
-PipelineConfigOption = Annotated[
+SourceInputArg = Annotated[
     str,
-    typer.Option(
+    typer.Argument(
         ...,
-        '--config',
-        metavar='PATH',
-        help='Path to pipeline YAML configuration file.',
+        metavar='SOURCE',
+        help=(
+            'Extract from SOURCE. Use --from/--source-type to override the '
+            'inferred connector when needed.'
+        ),
    ),
 ]
 
-RenderConfigOption = Annotated[
+SourceOverrideOption = Annotated[
     str | None,
     typer.Option(
-        '--config',
-        metavar='PATH',
-        help='Pipeline YAML that includes table_schemas for rendering.',
+        '--source-type',
+        metavar='CONNECTOR',
         show_default=False,
+        rich_help_panel='I/O overrides',
+        help='Override the inferred source type (file, database, api).',
    ),
 ]
 
-RenderOutputOption = Annotated[
+StdinFormatOption = Annotated[
     str | None,
     typer.Option(
-        '--output',
-        '-o',
-        metavar='PATH',
-        help='Write rendered SQL to PATH (default: stdout).',
+        '--source-format',
+        metavar='FORMAT',
+        show_default=False,
+        rich_help_panel='Format overrides',
+        help='Input payload format when reading from stdin (default: json).',
    ),
 ]
 
-RenderSpecOption = Annotated[
-    str | None,
-    typer.Option(
-        '--spec',
-        metavar='PATH',
-        help='Standalone table spec file (.yml/.yaml/.json).',
-        show_default=False,
+StreamingSourceArg = Annotated[
+    str,
+    typer.Argument(
+        ...,
+        metavar='SOURCE',
+        help=(
+            'Data source to transform or validate (path, JSON payload, or '
+            '- for stdin).'
+        ),
    ),
 ]
 
-RenderTableOption = Annotated[
+TargetFormatOption = Annotated[
     str | None,
     typer.Option(
-        '--table',
-        metavar='NAME',
-        help='Filter to a single table name from table_schemas.',
+        '--target-format',
+        metavar='FORMAT',
+        show_default=False,
+        rich_help_panel='Format overrides',
+        help=(
+            'Payload format when TARGET is - or a non-file connector. File '
+            'targets infer format from the extension.'
+        ),
    ),
 ]
 
-RenderTemplateOption = Annotated[
+TargetInputArg = Annotated[
     str,
+    typer.Argument(
+        ...,
+        metavar='TARGET',
+        help=(
+            'Load JSON data from stdin into TARGET. Use --to/--target-type '
+            'to override connector inference when needed. Source data must '
+            'be piped into stdin.'
+        ),
+    ),
+]
+
+TargetOverrideOption = Annotated[
+    str | None,
     typer.Option(
-        '--template',
-        '-t',
-        metavar='KEY|PATH',
-        help='Template key (ddl/view) or path to a Jinja template file.',
-        show_default=True,
+        '--target-type',
+        metavar='CONNECTOR',
+        show_default=False,
+        rich_help_panel='I/O overrides',
+        help='Override the inferred target type (file, database, api).',
    ),
 ]
 
-RenderTemplatePathOption = Annotated[
+TargetPathOption = Annotated[
     str | None,
     typer.Option(
-        '--template-path',
+        '--target',
         metavar='PATH',
-        help=(
-            'Explicit path to a Jinja template file (overrides template key).'
-        ),
+        help='Target file for transformed or validated output (- for stdout).',
    ),
 ]
 
@@ -745,6 +745,83 @@ def _root(
     raise typer.Exit(0)
 
 
+@app.command('check')
+def check_cmd(
+    ctx: typer.Context,
+    config: PipelineConfigOption,
+    jobs: bool = typer.Option(
+        False,
+        '--jobs',
+        help='List available job names and exit',
+    ),
+    pipelines: bool = typer.Option(
+        False,
+        '--pipelines',
+        help='List ETL pipelines',
+    ),
+    sources: bool = typer.Option(
+        False,
+        '--sources',
+        help='List data sources',
+    ),
+    summary: bool = typer.Option(
+        False,
+        '--summary',
+        help='Show pipeline summary (name, version, sources, targets, jobs)',
+    ),
+    targets: bool = typer.Option(
+        False,
+        '--targets',
+        help='List data targets',
+    ),
+    transforms: bool = typer.Option(
+        False,
+        '--transforms',
+        help='List data transforms',
+    ),
+) -> int:
+    """
+    Print ETL entities from a pipeline YAML configuration.
+
+    Parameters
+    ----------
+    ctx : typer.Context
+        Typer execution context provided to the command.
+    config : PipelineConfigOption
+        Path to pipeline YAML configuration file.
+    jobs : bool, optional
+        If True, list available job names and exit.
+    pipelines : bool, optional
+        If True, list ETL pipelines.
+    sources : bool, optional
+        If True, list data sources.
+    summary : bool, optional
+        If True, show pipeline summary (name, version, sources, targets, jobs).
+    targets : bool, optional
+        If True, list data targets.
+    transforms : bool, optional
+        If True, list data transforms.
+
+    Returns
+    -------
+    int
+        Zero on success.
+    """
+    state = _ensure_state(ctx)
+    ns = _stateful_namespace(
+        state,
+        command='check',
+        config=config,
+        summary=summary,
+        pipelines=pipelines,
+        jobs=jobs,
+        sources=sources,
+        targets=targets,
+        transforms=transforms,
+    )
+    return int(check_handler(ns))
+
+
 @app.command('extract')
 def extract_cmd(
     ctx: typer.Context,
@@ -828,84 +905,7 @@ def extract_cmd(
         source=resolved_source,
         **format_kwargs,
     )
-    return int(cmd_extract(ns))
-
-
-@app.command('list')
-def list_cmd(
-    ctx: typer.Context,
-    config: PipelineConfigOption,
-    jobs: bool = typer.Option(
-        False,
-        '--jobs',
-        help='List available job names and exit',
-    ),
-    pipelines: bool = typer.Option(
-        False,
-        '--pipelines',
-        help='List ETL pipelines',
-    ),
-    sources: bool = typer.Option(
-        False,
-        '--sources',
-        help='List data sources',
-    ),
-    summary: bool = typer.Option(
-        False,
-        '--summary',
-        help='Show pipeline summary (name, version, sources, targets, jobs)',
-    ),
-    targets: bool = typer.Option(
-        False,
-        '--targets',
-        help='List data targets',
-    ),
-    transforms: bool = typer.Option(
-        False,
-        '--transforms',
-        help='List data transforms',
-    ),
-) -> int:
-    """
-    Print ETL entities from a pipeline YAML configuration.
-
-    Parameters
-    ----------
-    ctx : typer.Context
-        Typer execution context provided to the command.
-    config : PipelineConfigOption
-        Path to pipeline YAML configuration file.
-    jobs : bool, optional
-        If True, list available job names and exit.
-    pipelines : bool, optional
-        If True, list ETL pipelines.
-    sources : bool, optional
-        If True, list data sources.
-    summary : bool, optional
-        If True, show pipeline summary (name, version, sources, targets, jobs).
-    targets : bool, optional
-        If True, list data targets.
-    transforms : bool, optional
-        If True, list data transforms.
-
-    Returns
-    -------
-    int
-        Zero on success.
-    """
-    state = _ensure_state(ctx)
-    ns = _stateful_namespace(
-        state,
-        command='list',
-        config=config,
-        summary=summary,
-        pipelines=pipelines,
-        jobs=jobs,
-        sources=sources,
-        targets=targets,
-        transforms=transforms,
-    )
-    return int(cmd_list(ns))
+    return int(extract_handler(ns))
 
 
 @app.command('load')
@@ -1007,7 +1007,7 @@ def load_cmd(
         target=resolved_target,
         **format_kwargs,
     )
-    return int(cmd_load(ns))
+    return int(load_handler(ns))
 
 
 @app.command('pipeline')
@@ -1061,7 +1061,7 @@ def pipeline_cmd(
         list=jobs,
         run=run_target,
     )
-    return int(cmd_pipeline(ns))
+    return int(pipeline_handler(ns))
 
 
 @app.command('render')
@@ -1110,7 +1110,7 @@ def render_cmd(
         template_path=template_path,
         output=output,
     )
-    return int(cmd_render(ns))
+    return int(render_handler(ns))
 
 
 @app.command('run')
@@ -1157,7 +1157,7 @@ def run_cmd(
         job=job,
         pipeline=pipeline,
     )
-    return int(cmd_run(ns))
+    return int(run_handler(ns))
 
 
 @app.command('transform')
@@ -1294,7 +1294,7 @@ def transform_cmd(
         target_format=target_format_kwargs['format'],
         **target_format_kwargs,
     )
-    return int(cmd_transform(ns))
+    return int(transform_handler(ns))
 
 
 @app.command('validate')
@@ -1364,4 +1364,4 @@ def validate_cmd(
         source_format=source_format,
         **source_format_kwargs,
    )
-    return int(cmd_validate(ns))
+    return int(validate_handler(ns))
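The `check` command registered above is invoked the same way the former `list` command was; a minimal usage sketch, with an illustrative config path borrowed from the README examples further down:

```bash
# Summarize a pipeline, then list its jobs and sources, with the new subcommand.
etlplus check --config examples/configs/pipeline.yml --summary
etlplus check --config examples/configs/pipeline.yml --jobs --sources
```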
etlplus/cli/handlers.py CHANGED
@@ -36,14 +36,14 @@ from ..validate import validate
 
 __all__ = [
     # Functions
-    'cmd_extract',
-    'cmd_list',
-    'cmd_load',
-    'cmd_pipeline',
-    'cmd_render',
-    'cmd_run',
-    'cmd_transform',
-    'cmd_validate',
+    'extract_handler',
+    'check_handler',
+    'load_handler',
+    'pipeline_handler',
+    'render_handler',
+    'run_handler',
+    'transform_handler',
+    'validate_handler',
 ]
 
 
@@ -148,12 +148,12 @@ def _infer_payload_format(
     return 'csv'
 
 
-def _list_sections(
+def _check_sections(
     cfg: PipelineConfig,
     args: argparse.Namespace,
 ) -> dict[str, Any]:
     """
-    Build sectioned metadata output for the list command.
+    Build sectioned metadata output for the check command.
 
     Parameters
     ----------
@@ -165,7 +165,7 @@ def _list_sections(
     Returns
     -------
     dict[str, Any]
-        Metadata output for the list command.
+        Metadata output for the check command.
     """
     sections: dict[str, Any] = {}
     if getattr(args, 'jobs', False):
@@ -423,11 +423,11 @@ def _write_json_output(
 # SECTION: FUNCTIONS ======================================================== #
 
 
-def cmd_extract(
+def check_handler(
     args: argparse.Namespace,
 ) -> int:
     """
-    Extract data from a source.
+    Print requested pipeline sections from a YAML configuration.
 
     Parameters
     ----------
@@ -439,40 +439,20 @@ def cmd_extract(
     int
         Zero on success.
     """
-    pretty, _ = _presentation_flags(args)
-    explicit_format = _explicit_cli_format(args)
-
-    if args.source == '-':
-        text = _read_stdin_text()
-        payload = _parse_text_payload(text, getattr(args, 'format', None))
-        _emit_json(payload, pretty=pretty)
-
+    cfg = load_pipeline_config(args.config, substitute=True)
+    if getattr(args, 'summary', False):
+        print_json(_pipeline_summary(cfg))
         return 0
 
-    result = extract(
-        args.source_type,
-        args.source,
-        file_format=explicit_format,
-    )
-    output_path = getattr(args, 'target', None)
-    if output_path is None:
-        output_path = getattr(args, 'output', None)
-
-    if not _write_json_output(
-        result,
-        output_path,
-        success_message='Data extracted and saved to',
-    ):
-        _emit_json(result, pretty=pretty)
-
+    print_json(_check_sections(cfg, args))
     return 0
 
 
-def cmd_validate(
+def extract_handler(
     args: argparse.Namespace,
 ) -> int:
     """
-    Validate data from a source.
+    Extract data from a source.
 
     Parameters
     ----------
@@ -484,81 +464,36 @@ def cmd_validate(
     int
         Zero on success.
     """
-    pretty, _quiet = _presentation_flags(args)
-    format_explicit: bool = getattr(args, '_format_explicit', False)
-    format_hint: str | None = getattr(args, 'source_format', None)
-    payload = cast(
-        JSONData | str,
-        _resolve_cli_payload(
-            args.source,
-            format_hint=format_hint,
-            format_explicit=format_explicit,
-        ),
-    )
-    result = validate(payload, args.rules)
-
-    target_path = getattr(args, 'target', None)
-    if target_path:
-        validated_data = result.get('data')
-        if validated_data is not None:
-            _write_json_output(
-                validated_data,
-                target_path,
-                success_message='Validation result saved to',
-            )
-        else:
-            print(
-                f'Validation failed, no data to save for {target_path}',
-                file=sys.stderr,
-            )
-    else:
-        _emit_json(result, pretty=pretty)
-
-    return 0
-
-
-def cmd_transform(
-    args: argparse.Namespace,
-) -> int:
-    """
-    Transform data from a source.
+    pretty, _ = _presentation_flags(args)
+    explicit_format = _explicit_cli_format(args)
 
-    Parameters
-    ----------
-    args : argparse.Namespace
-        Parsed command-line arguments.
+    if args.source == '-':
+        text = _read_stdin_text()
+        payload = _parse_text_payload(text, getattr(args, 'format', None))
+        _emit_json(payload, pretty=pretty)
 
-    Returns
-    -------
-    int
-        Zero on success.
-    """
-    pretty, _quiet = _presentation_flags(args)
-    format_hint: str | None = getattr(args, 'source_format', None)
-    format_explicit: bool = format_hint is not None
+        return 0
 
-    payload = cast(
-        JSONData | str,
-        _resolve_cli_payload(
-            args.source,
-            format_hint=format_hint,
-            format_explicit=format_explicit,
-        ),
+    result = extract(
+        args.source_type,
+        args.source,
+        file_format=explicit_format,
    )
-
-    data = transform(payload, args.operations)
+    output_path = getattr(args, 'target', None)
+    if output_path is None:
+        output_path = getattr(args, 'output', None)
 
     if not _write_json_output(
-        data,
-        getattr(args, 'target', None),
-        success_message='Data transformed and saved to',
+        result,
+        output_path,
+        success_message='Data extracted and saved to',
    ):
-        _emit_json(data, pretty=pretty)
+        _emit_json(result, pretty=pretty)
 
     return 0
 
 
-def cmd_load(
+def load_handler(
     args: argparse.Namespace,
 ) -> int:
     """
@@ -617,7 +552,7 @@ def cmd_load(
     return 0
 
 
-def cmd_pipeline(
+def pipeline_handler(
     args: argparse.Namespace,
 ) -> int:
     """
@@ -634,7 +569,7 @@ def cmd_pipeline(
         Zero on success.
     """
     print(
-        'DEPRECATED: use "etlplus list --summary|--jobs" or '
+        'DEPRECATED: use "etlplus check --summary|--jobs" or '
         '"etlplus run --job/--pipeline" instead of "etlplus pipeline".',
         file=sys.stderr,
    )
@@ -661,12 +596,11 @@ def cmd_pipeline(
     return 0
 
 
-def cmd_render(
+def render_handler(
     args: argparse.Namespace,
 ) -> int:
     """Render SQL DDL statements from table schema specs."""
-
-    _pretty, quiet = _presentation_flags(args)
+    _, quiet = _presentation_flags(args)
 
     template_value = getattr(args, 'template', 'ddl') or 'ddl'
     template_path = getattr(args, 'template_path', None)
@@ -722,9 +656,11 @@ def cmd_render(
     return 0
 
 
-def cmd_list(args: argparse.Namespace) -> int:
+def run_handler(
+    args: argparse.Namespace,
+) -> int:
     """
-    Print requested pipeline sections from a YAML configuration.
+    Execute an ETL job end-to-end from a pipeline YAML configuration.
 
     Parameters
     ----------
@@ -737,17 +673,22 @@ def cmd_list(args: argparse.Namespace) -> int:
         Zero on success.
     """
     cfg = load_pipeline_config(args.config, substitute=True)
-    if getattr(args, 'summary', False):
-        print_json(_pipeline_summary(cfg))
+
+    job_name = getattr(args, 'job', None) or getattr(args, 'pipeline', None)
+    if job_name:
+        result = run(job=job_name, config_path=args.config)
+        print_json({'status': 'ok', 'result': result})
         return 0
 
-    print_json(_list_sections(cfg, args))
+    print_json(_pipeline_summary(cfg))
     return 0
 
 
-def cmd_run(args: argparse.Namespace) -> int:
+def transform_handler(
+    args: argparse.Namespace,
+) -> int:
     """
-    Execute an ETL job end-to-end from a pipeline YAML configuration.
+    Transform data from a source.
 
     Parameters
     ----------
@@ -759,13 +700,75 @@ def cmd_run(args: argparse.Namespace) -> int:
     int
         Zero on success.
     """
-    cfg = load_pipeline_config(args.config, substitute=True)
+    pretty, _ = _presentation_flags(args)
+    format_hint: str | None = getattr(args, 'source_format', None)
+    format_explicit: bool = format_hint is not None
 
-    job_name = getattr(args, 'job', None) or getattr(args, 'pipeline', None)
-    if job_name:
-        result = run(job=job_name, config_path=args.config)
-        print_json({'status': 'ok', 'result': result})
-        return 0
+    payload = cast(
+        JSONData | str,
+        _resolve_cli_payload(
+            args.source,
+            format_hint=format_hint,
+            format_explicit=format_explicit,
+        ),
+    )
+
+    data = transform(payload, args.operations)
+
+    if not _write_json_output(
+        data,
+        getattr(args, 'target', None),
+        success_message='Data transformed and saved to',
+    ):
+        _emit_json(data, pretty=pretty)
+
+    return 0
+
+
+def validate_handler(
+    args: argparse.Namespace,
+) -> int:
+    """
+    Validate data from a source.
+
+    Parameters
+    ----------
+    args : argparse.Namespace
+        Parsed command-line arguments.
+
+    Returns
+    -------
+    int
+        Zero on success.
+    """
+    pretty, _ = _presentation_flags(args)
+    format_explicit: bool = getattr(args, '_format_explicit', False)
+    format_hint: str | None = getattr(args, 'source_format', None)
+    payload = cast(
+        JSONData | str,
+        _resolve_cli_payload(
+            args.source,
+            format_hint=format_hint,
+            format_explicit=format_explicit,
+        ),
+    )
+    result = validate(payload, args.rules)
+
+    target_path = getattr(args, 'target', None)
+    if target_path:
+        validated_data = result.get('data')
+        if validated_data is not None:
+            _write_json_output(
+                validated_data,
+                target_path,
+                success_message='Validation result saved to',
+            )
+        else:
+            print(
+                f'Validation failed, no data to save for {target_path}',
+                file=sys.stderr,
+            )
+    else:
+        _emit_json(result, pretty=pretty)
 
-    print_json(_pipeline_summary(cfg))
     return 0
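Because the handlers still take a plain `argparse.Namespace`, they can also be driven directly; a minimal sketch of calling the renamed `check_handler`, assuming a Namespace shaped like the ones the CLI front ends build (the config path is illustrative):

```python
# Minimal sketch: call the renamed handler outside the CLI front ends.
import argparse

from etlplus.cli.handlers import check_handler

ns = argparse.Namespace(
    command='check',
    config='examples/configs/pipeline.yml',  # illustrative path
    summary=True,  # print the pipeline summary; section flags default to False
)
exit_code = check_handler(ns)  # prints JSON to stdout and returns 0
```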
etlplus/cli/main.py CHANGED
@@ -24,14 +24,14 @@ from ..enums import FileFormat
 from ..utils import json_type
 from .app import PROJECT_URL
 from .app import app
-from .handlers import cmd_extract
-from .handlers import cmd_list
-from .handlers import cmd_load
-from .handlers import cmd_pipeline
-from .handlers import cmd_render
-from .handlers import cmd_run
-from .handlers import cmd_transform
-from .handlers import cmd_validate
+from .handlers import check_handler
+from .handlers import extract_handler
+from .handlers import load_handler
+from .handlers import pipeline_handler
+from .handlers import render_handler
+from .handlers import run_handler
+from .handlers import transform_handler
+from .handlers import validate_handler
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -329,7 +329,7 @@ def create_parser() -> argparse.ArgumentParser:
         ),
    )
     _add_format_options(extract_parser, context='source')
-    extract_parser.set_defaults(func=cmd_extract)
+    extract_parser.set_defaults(func=extract_handler)
 
     validate_parser = subparsers.add_parser(
         'validate',
@@ -346,7 +346,7 @@ def create_parser() -> argparse.ArgumentParser:
         default={},
         help='Validation rules as JSON string',
    )
-    validate_parser.set_defaults(func=cmd_validate)
+    validate_parser.set_defaults(func=validate_handler)
 
     transform_parser = subparsers.add_parser(
         'transform',
@@ -394,7 +394,7 @@ def create_parser() -> argparse.ArgumentParser:
             'File targets infer format from the extension.'
         ),
    )
-    transform_parser.set_defaults(func=cmd_transform)
+    transform_parser.set_defaults(func=transform_handler)
 
     load_parser = subparsers.add_parser(
         'load',
@@ -418,7 +418,7 @@ def create_parser() -> argparse.ArgumentParser:
         ),
    )
     _add_format_options(load_parser, context='target')
-    load_parser.set_defaults(func=cmd_load)
+    load_parser.set_defaults(func=load_handler)
 
     pipe_parser = subparsers.add_parser(
         'pipeline',
@@ -440,7 +440,7 @@ def create_parser() -> argparse.ArgumentParser:
         metavar='JOB',
         help='Run a specific job by name',
    )
-    pipe_parser.set_defaults(func=cmd_pipeline)
+    pipe_parser.set_defaults(func=pipeline_handler)
 
     render_parser = subparsers.add_parser(
         'render',
@@ -476,47 +476,47 @@ def create_parser() -> argparse.ArgumentParser:
             'Explicit path to a Jinja template file (overrides template key).'
         ),
    )
-    render_parser.set_defaults(func=cmd_render)
+    render_parser.set_defaults(func=render_handler)
 
-    list_parser = subparsers.add_parser(
-        'list',
-        help='List ETL pipeline metadata',
+    check_parser = subparsers.add_parser(
+        'check',
+        help='Inspect ETL pipeline metadata',
         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
    )
-    _add_config_option(list_parser)
+    _add_config_option(check_parser)
     _add_boolean_flag(
-        list_parser,
+        check_parser,
         name='jobs',
         help_text='List ETL jobs',
    )
     _add_boolean_flag(
-        list_parser,
+        check_parser,
         name='pipelines',
         help_text='List ETL pipelines',
    )
     _add_boolean_flag(
-        list_parser,
+        check_parser,
         name='sources',
         help_text='List data sources',
    )
     _add_boolean_flag(
-        list_parser,
+        check_parser,
         name='summary',
         help_text=(
             'Show pipeline summary (name, version, sources, targets, jobs)'
         ),
    )
     _add_boolean_flag(
-        list_parser,
+        check_parser,
         name='targets',
         help_text='List data targets',
    )
     _add_boolean_flag(
-        list_parser,
+        check_parser,
         name='transforms',
         help_text='List data transforms',
    )
-    list_parser.set_defaults(func=cmd_list)
+    check_parser.set_defaults(func=check_handler)
 
     run_parser = subparsers.add_parser(
         'run',
@@ -537,7 +537,7 @@ def create_parser() -> argparse.ArgumentParser:
         '--pipeline',
         help='Name of the pipeline to run',
    )
-    run_parser.set_defaults(func=cmd_run)
+    run_parser.set_defaults(func=run_handler)
 
     return parser
 
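The parser binds each subcommand to its handler with `set_defaults(func=...)`, so dispatch reduces to calling `args.func(args)`; a self-contained sketch of that pattern, using a hypothetical stand-in handler:

```python
# Toy reproduction of the set_defaults dispatch pattern; toy_check_handler
# is hypothetical and stands in for etlplus.cli.handlers.check_handler.
import argparse


def toy_check_handler(args: argparse.Namespace) -> int:
    print(f'checking {args.config} (summary={args.summary})')
    return 0


parser = argparse.ArgumentParser(prog='etlplus-demo')
subparsers = parser.add_subparsers(dest='command', required=True)
check_parser = subparsers.add_parser('check', help='Inspect ETL pipeline metadata')
check_parser.add_argument('--config', metavar='PATH', required=True)
check_parser.add_argument('--summary', action='store_true')
check_parser.set_defaults(func=toy_check_handler)

args = parser.parse_args(['check', '--config', 'pipeline.yml', '--summary'])
raise SystemExit(args.func(args))  # each subparser already bound its handler
```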
etlplus/run.py CHANGED
@@ -142,10 +142,8 @@ def run(
     """
     Run a pipeline job defined in a YAML configuration.
 
-    This mirrors the run-mode logic from ``etlplus.cli.cmd_pipeline``
-    (without the list/summary modes). By default it reads the configuration
-    from ``in/pipeline.yml``, but callers can provide an explicit
-    ``config_path`` to override this.
+    By default it reads the configuration from ``in/pipeline.yml``, but callers
+    can provide an explicit ``config_path`` to override this.
 
     Parameters
     ----------
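Per the `run_handler` wiring above, the same job execution is available programmatically; a minimal sketch (the job name comes from the README quick reference and is illustrative):

```python
# Programmatic equivalent of `etlplus run --config ... --job ...`.
# If config_path is omitted, run() falls back to in/pipeline.yml per its docstring.
from etlplus.run import run

result = run(
    job='file_to_file_customers',  # illustrative job name
    config_path='examples/configs/pipeline.yml',
)
print(result)
```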
etlplus-0.5.5.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: etlplus
-Version: 0.5.3
+Version: 0.5.5
 Summary: A Swiss Army knife for simple ETL operations
 Home-page: https://github.com/Dagitali/ETLPlus
 Author: ETLPlus Team
@@ -90,7 +90,7 @@ package and command-line interface for data extraction, validation, transformati
 
 ## Features
 
-- **Inspect** data pipeline definitions before running them:
+- **Check** data pipeline definitions before running them:
   - Summarize jobs, sources, targets, and transforms
   - Confirm configuration changes by printing focused sections on demand
 
@@ -179,21 +179,21 @@ etlplus --help
 etlplus --version
 ```
 
-#### Inspect Pipelines
+#### Check Pipelines
 
-Use `etlplus list` to explore pipeline YAML definitions without running them. The command can print
+Use `etlplus check` to explore pipeline YAML definitions without running them. The command can print
 job names, summarize configured sources and targets, or drill into specific sections.
 
 List jobs and show a pipeline summary:
 ```bash
-etlplus list --config examples/configs/pipeline.yml --jobs
-etlplus list --config examples/configs/pipeline.yml --summary
+etlplus check --config examples/configs/pipeline.yml --jobs
+etlplus check --config examples/configs/pipeline.yml --summary
 ```
 
 Show sources or transforms for troubleshooting:
 ```bash
-etlplus list --config examples/configs/pipeline.yml --sources
-etlplus list --config examples/configs/pipeline.yml --transforms
+etlplus check --config examples/configs/pipeline.yml --sources
+etlplus check --config examples/configs/pipeline.yml --transforms
 ```
 
 #### Render SQL DDL
@@ -358,8 +358,8 @@ CLI quick reference for pipelines:
 
 ```bash
 # List jobs or show a pipeline summary
-etlplus list --config examples/configs/pipeline.yml --jobs
-etlplus list --config examples/configs/pipeline.yml --summary
+etlplus check --config examples/configs/pipeline.yml --jobs
+etlplus check --config examples/configs/pipeline.yml --summary
 
 # Run a job
 etlplus run --config examples/configs/pipeline.yml --job file_to_file_customers
etlplus-0.5.5.dist-info/RECORD CHANGED
@@ -8,7 +8,7 @@ etlplus/file.py,sha256=RxIAsGDN4f_vNA2B5-ct88JNd_ISAyYbooIRE5DstS8,17972
 etlplus/load.py,sha256=BwF3gT4gIr-5CvNMz_aLTCl-w2ihWSTxNVd4X92XFwI,8737
 etlplus/mixins.py,sha256=ifGpHwWv7U00yqGf-kN93vJax2IiK4jaGtTsPsO3Oak,1350
 etlplus/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-etlplus/run.py,sha256=zl_Yx35spcgaa9Xx7-kcJEb1CAYyMIiqtPlsSrYxRfs,12448
+etlplus/run.py,sha256=X4kp5FQlIWVf1_d9oSrchKau7BFDCE1Zkscvu7WPaWw,12340
 etlplus/run_helpers.py,sha256=bj6MkaeFxjl3CeKG1HoXKx5DwAlXNERVW-GX-z1P_qQ,24373
 etlplus/transform.py,sha256=uAUVDDHYCgx7GpVez9IK3OAZM-CnCuMa9iox3vwGGJA,25296
 etlplus/types.py,sha256=SJiZ7wJiSnV4CEvF-9E5nSFLBo4DT9OqHQqj1GSHkv8,6042
@@ -32,9 +32,9 @@ etlplus/api/rate_limiting/__init__.py,sha256=ZySB1dZettEDnWvI1EHf_TZ9L08M_kKsNR-
 etlplus/api/rate_limiting/config.py,sha256=2b4wIynblN-1EyMqI4aXa71SljzSjXYh5N1Nngr3jOg,9406
 etlplus/api/rate_limiting/rate_limiter.py,sha256=Uxozqd_Ej5Lsj-M-mLT2WexChgWh7x35_YP10yqYPQA,7159
 etlplus/cli/__init__.py,sha256=J97-Rv931IL1_b4AXnB7Fbbd7HKnHBpx18NQfC_kE6c,299
-etlplus/cli/app.py,sha256=buGIIoSIu5cxbYTdPcA_iaxJaPG-eHj-LPD9OgZ0h9w,35824
-etlplus/cli/handlers.py,sha256=O7Mh9nowdMCzaV36KASWZVC4fNMEg9xnVZXE7NHW6P8,18873
-etlplus/cli/main.py,sha256=5qWAKqlRtnb4VEpBfGT45q-LBxi_2hSMnw23jNyYA_Q,16497
+etlplus/cli/app.py,sha256=SYPO-NDwXgymJrACw39jZ_NJrSKAs0O8anuWR5o42WM,35893
+etlplus/cli/handlers.py,sha256=ZPoV9N48mtpUMf4S_czAKpo4ZqLFVb4tcTq0B36v-84,18941
+etlplus/cli/main.py,sha256=ijYOy72SEsxrTEBan5yADW8CZyr0yddVF8HeMgFw6Zg,16576
 etlplus/config/__init__.py,sha256=VZWzOg7d2YR9NT6UwKTv44yf2FRUMjTHynkm1Dl5Qzo,1486
 etlplus/config/connector.py,sha256=0-TIwevHbKRHVmucvyGpPd-3tB1dKHB-dj0yJ6kq5eY,9809
 etlplus/config/jobs.py,sha256=hmzRCqt0OvCEZZR4ONKrd3lvSv0OmayjLc4yOBk3ug8,7399
@@ -47,9 +47,9 @@ etlplus/templates/ddl.sql.j2,sha256=s8fMWvcb4eaJVXkifuib1aQPljtZ8buuyB_uA-ZdU3Q,
 etlplus/templates/view.sql.j2,sha256=Iy8DHfhq5yyvrUKDxqp_aHIEXY4Tm6j4wT7YDEFWAhk,2180
 etlplus/validation/__init__.py,sha256=Pe5Xg1_EA4uiNZGYu5WTF3j7odjmyxnAJ8rcioaplSQ,1254
 etlplus/validation/utils.py,sha256=Mtqg449VIke0ziy_wd2r6yrwJzQkA1iulZC87FzXMjo,10201
-etlplus-0.5.3.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
-etlplus-0.5.3.dist-info/METADATA,sha256=yaJ4G8RA4EjaQwWoCabTSrylNu55kjkDFHFYNIaVabE,19285
-etlplus-0.5.3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-etlplus-0.5.3.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
-etlplus-0.5.3.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
-etlplus-0.5.3.dist-info/RECORD,,
+etlplus-0.5.5.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
+etlplus-0.5.5.dist-info/METADATA,sha256=I6_4VqX4PBWV2T5KYctjcoL9aCwp6Iixc9IZhgLUKFQ,19288
+etlplus-0.5.5.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+etlplus-0.5.5.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
+etlplus-0.5.5.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
+etlplus-0.5.5.dist-info/RECORD,,