etlplus 0.4.9__tar.gz → 0.5.2__tar.gz
This diff shows the changes between publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
- etlplus-0.5.2/MANIFEST.in +12 -0
- {etlplus-0.4.9/etlplus.egg-info → etlplus-0.5.2}/PKG-INFO +41 -1
- {etlplus-0.4.9 → etlplus-0.5.2}/README.md +40 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/app.py +112 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/handlers.py +114 -20
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/main.py +37 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/pipeline.py +11 -0
- etlplus-0.5.2/etlplus/ddl.py +197 -0
- etlplus-0.5.2/etlplus/templates/__init__.py +5 -0
- etlplus-0.5.2/etlplus/templates/ddl.sql.j2 +128 -0
- etlplus-0.5.2/etlplus/templates/view.sql.j2 +69 -0
- {etlplus-0.4.9 → etlplus-0.5.2/etlplus.egg-info}/PKG-INFO +41 -1
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus.egg-info/SOURCES.txt +6 -0
- etlplus-0.5.2/examples/configs/ddl_spec.yml +67 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/pyproject.toml +1 -1
- {etlplus-0.4.9 → etlplus-0.5.2}/setup.py +4 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/cli/test_u_cli_app.py +31 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/cli/test_u_cli_handlers.py +67 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/cli/test_u_cli_main.py +24 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/config/test_u_pipeline.py +30 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.coveragerc +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.editorconfig +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.gitattributes +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.github/actions/python-bootstrap/action.yml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.github/workflows/ci.yml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.gitignore +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.pre-commit-config.yaml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/.ruff.toml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/CODE_OF_CONDUCT.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/CONTRIBUTING.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/DEMO.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/LICENSE +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/Makefile +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/REFERENCES.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/docs/pipeline-guide.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/docs/snippets/installation_version.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/__main__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/__version__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/README.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/auth.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/config.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/endpoint_client.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/errors.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/pagination/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/pagination/client.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/pagination/config.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/pagination/paginator.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/rate_limiting/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/rate_limiting/config.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/rate_limiting/rate_limiter.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/request_manager.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/retry_manager.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/transport.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/api/types.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/connector.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/jobs.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/profile.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/types.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/utils.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/enums.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/extract.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/file.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/load.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/mixins.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/py.typed +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/run.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/run_helpers.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/transform.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/types.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/utils.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/validate.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/validation/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus/validation/utils.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus.egg-info/dependency_links.txt +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus.egg-info/entry_points.txt +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus.egg-info/requires.txt +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/etlplus.egg-info/top_level.txt +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/README.md +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/configs/pipeline.yml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/data/sample.csv +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/data/sample.json +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/data/sample.xml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/data/sample.xsd +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/data/sample.yaml +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/examples/quickstart_python.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/pytest.ini +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/setup.cfg +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/__init__.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/conftest.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/conftest.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_cli.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_examples_data_parity.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_pagination_strategy.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_pipeline_smoke.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_pipeline_yaml_load.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_run.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_run_profile_pagination_defaults.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/integration/test_i_run_profile_rate_limit_defaults.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/conftest.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_auth.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_config.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_endpoint_client.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_mocks.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_pagination_client.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_pagination_config.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_paginator.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_rate_limit_config.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_rate_limiter.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_request_manager.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_retry_manager.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_transport.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/api/test_u_types.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/cli/conftest.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/config/test_u_config_utils.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/config/test_u_connector.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/config/test_u_jobs.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/conftest.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_enums.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_extract.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_file.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_load.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_main.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_mixins.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_run.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_run_helpers.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_transform.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_utils.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_validate.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/test_u_version.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tests/unit/validation/test_u_validation_utils.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tools/run_pipeline.py +0 -0
- {etlplus-0.4.9 → etlplus-0.5.2}/tools/update_demo_snippets.py +0 -0

etlplus-0.5.2/MANIFEST.in (new file)

@@ -0,0 +1,12 @@
+# MANIFEST.in
+# ETLPlus
+#
+# Copyright © 2026 Dagitali LLC. All rights reserved.
+#
+# Contains commands that allow lists of files to be discovered and manipulated.
+#
+# See:
+# 1. https://setuptools.pypa.io/en/latest/userguide/miscellaneous.html
+
+# Include Jinja template files in the etlplus package
+recursive-include etlplus/templates *.j2

{etlplus-0.4.9/etlplus.egg-info → etlplus-0.5.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: etlplus
-Version: 0.4.9
+Version: 0.5.2
 Summary: A Swiss Army knife for simple ETL operations
 Home-page: https://github.com/Dagitali/ETLPlus
 Author: ETLPlus Team
@@ -61,6 +61,8 @@ package and command-line interface for data extraction, validation, transformati
 - [Quickstart](#quickstart)
 - [Usage](#usage)
   - [Command Line Interface](#command-line-interface)
+    - [Inspect Pipelines](#inspect-pipelines)
+    - [Render SQL DDL](#render-sql-ddl)
     - [Extract Data](#extract-data)
     - [Validate Data](#validate-data)
     - [Transform Data](#transform-data)
@@ -169,6 +171,44 @@ etlplus --help
 etlplus --version
 ```
 
+#### Inspect Pipelines
+
+Use `etlplus list` to explore pipeline YAML definitions without running them. The command can print
+job names, summarize configured sources and targets, or drill into specific sections.
+
+List jobs and show a pipeline summary:
+```bash
+etlplus list --config examples/configs/pipeline.yml --jobs
+etlplus list --config examples/configs/pipeline.yml --summary
+```
+
+Show sources or transforms for troubleshooting:
+```bash
+etlplus list --config examples/configs/pipeline.yml --sources
+etlplus list --config examples/configs/pipeline.yml --transforms
+```
+
+#### Render SQL DDL
+
+Use `etlplus render` to turn table schema specs into ready-to-run SQL. Render from a pipeline config
+or from a standalone schema file, and choose the built-in `ddl` or `view` templates (or provide your
+own).
+
+Render all tables defined in a pipeline:
+```bash
+etlplus render --config examples/configs/pipeline.yml --template ddl
+```
+
+Render a single table in that pipeline:
+```bash
+etlplus render --config examples/configs/pipeline.yml --table customers --template view
+```
+
+Render from a standalone table spec to a file:
+```bash
+etlplus render --spec schemas/customer.yml --template view -o temp/customer_view.sql
+```
+
 #### Extract Data
 
 Note: For file sources, the format is normally inferred from the filename extension. Use

{etlplus-0.4.9 → etlplus-0.5.2}/README.md

@@ -19,6 +19,8 @@ package and command-line interface for data extraction, validation, transformati
 - [Quickstart](#quickstart)
 - [Usage](#usage)
   - [Command Line Interface](#command-line-interface)
+    - [Inspect Pipelines](#inspect-pipelines)
+    - [Render SQL DDL](#render-sql-ddl)
     - [Extract Data](#extract-data)
     - [Validate Data](#validate-data)
     - [Transform Data](#transform-data)
@@ -127,6 +129,44 @@ etlplus --help
 etlplus --version
 ```
 
+#### Inspect Pipelines
+
+Use `etlplus list` to explore pipeline YAML definitions without running them. The command can print
+job names, summarize configured sources and targets, or drill into specific sections.
+
+List jobs and show a pipeline summary:
+```bash
+etlplus list --config examples/configs/pipeline.yml --jobs
+etlplus list --config examples/configs/pipeline.yml --summary
+```
+
+Show sources or transforms for troubleshooting:
+```bash
+etlplus list --config examples/configs/pipeline.yml --sources
+etlplus list --config examples/configs/pipeline.yml --transforms
+```
+
+#### Render SQL DDL
+
+Use `etlplus render` to turn table schema specs into ready-to-run SQL. Render from a pipeline config
+or from a standalone schema file, and choose the built-in `ddl` or `view` templates (or provide your
+own).
+
+Render all tables defined in a pipeline:
+```bash
+etlplus render --config examples/configs/pipeline.yml --template ddl
+```
+
+Render a single table in that pipeline:
+```bash
+etlplus render --config examples/configs/pipeline.yml --table customers --template view
+```
+
+Render from a standalone table spec to a file:
+```bash
+etlplus render --spec schemas/customer.yml --template view -o temp/customer_view.sql
+```
+
 #### Extract Data
 
 Note: For file sources, the format is normally inferred from the filename extension. Use
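The README example above points `--spec` at `schemas/customer.yml`, but the layout of a standalone spec file is not shown in this section (the shipped example, `examples/configs/ddl_spec.yml`, is listed above without its body). A purely illustrative sketch: only the `table` key is grounded in this diff (it is what `cmd_render` matches against `--table`); `schema` and `columns` are assumptions about what the built-in `ddl`/`view` templates might consume.

```yaml
# schemas/customer.yml: hypothetical standalone table spec.
# Only `table` is taken from this diff; `schema` and `columns` are assumed.
table: customers
schema: public
columns:
  - name: customer_id
    type: INTEGER
  - name: email
    type: TEXT
```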

{etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/app.py

@@ -25,6 +25,7 @@ Subcommands
 - ``validate``: validate data against rules
 - ``transform``: transform records
 - ``load``: load data to files, databases, or REST APIs
+- ``render``: render SQL DDL from table schema specs
 
 Notes
 -----
@@ -60,6 +61,7 @@ from .handlers import cmd_extract
 from .handlers import cmd_list
 from .handlers import cmd_load
 from .handlers import cmd_pipeline
+from .handlers import cmd_render
 from .handlers import cmd_run
 from .handlers import cmd_transform
 from .handlers import cmd_validate
@@ -258,6 +260,67 @@ PipelineConfigOption = Annotated[
     ),
 ]
 
+RenderConfigOption = Annotated[
+    str | None,
+    typer.Option(
+        '--config',
+        metavar='PATH',
+        help='Pipeline YAML that includes table_schemas for rendering.',
+        show_default=False,
+    ),
+]
+
+RenderOutputOption = Annotated[
+    str | None,
+    typer.Option(
+        '--output',
+        '-o',
+        metavar='PATH',
+        help='Write rendered SQL to PATH (default: stdout).',
+    ),
+]
+
+RenderSpecOption = Annotated[
+    str | None,
+    typer.Option(
+        '--spec',
+        metavar='PATH',
+        help='Standalone table spec file (.yml/.yaml/.json).',
+        show_default=False,
+    ),
+]
+
+RenderTableOption = Annotated[
+    str | None,
+    typer.Option(
+        '--table',
+        metavar='NAME',
+        help='Filter to a single table name from table_schemas.',
+    ),
+]
+
+RenderTemplateOption = Annotated[
+    str,
+    typer.Option(
+        '--template',
+        '-t',
+        metavar='KEY|PATH',
+        help='Template key (ddl/view) or path to a Jinja template file.',
+        show_default=True,
+    ),
+]
+
+RenderTemplatePathOption = Annotated[
+    str | None,
+    typer.Option(
+        '--template-path',
+        metavar='PATH',
+        help=(
+            'Explicit path to a Jinja template file (overrides template key).'
+        ),
+    ),
+]
+
 
 # SECTION: DATA CLASSES ===================================================== #
 
@@ -1001,6 +1064,55 @@ def pipeline_cmd(
     return int(cmd_pipeline(ns))
 
 
+@app.command('render')
+def render_cmd(
+    ctx: typer.Context,
+    config: RenderConfigOption = None,
+    spec: RenderSpecOption = None,
+    table: RenderTableOption = None,
+    template: RenderTemplateOption = 'ddl',
+    template_path: RenderTemplatePathOption = None,
+    output: RenderOutputOption = None,
+) -> int:
+    """
+    Render SQL DDL from table schemas defined in YAML/JSON configs.
+
+    Parameters
+    ----------
+    ctx : typer.Context
+        Typer execution context provided to the command.
+    config : RenderConfigOption, optional
+        Pipeline YAML containing ``table_schemas`` entries.
+    spec : RenderSpecOption, optional
+        Standalone table spec file (.yml/.yaml/.json).
+    table : RenderTableOption, optional
+        Filter to a single table name within the available specs.
+    template : RenderTemplateOption, optional
+        Built-in template key or template file path.
+    template_path : RenderTemplatePathOption, optional
+        Explicit template file path to render with.
+    output : RenderOutputOption, optional
+        Path to write SQL to (stdout when omitted).
+
+    Returns
+    -------
+    int
+        Zero on success.
+    """
+    state = _ensure_state(ctx)
+    ns = _stateful_namespace(
+        state,
+        command='render',
+        config=config,
+        spec=spec,
+        table=table,
+        template=template,
+        template_path=template_path,
+        output=output,
+    )
+    return int(cmd_render(ns))
+
+
 @app.command('run')
 def run_cmd(
     ctx: typer.Context,

{etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/handlers.py

@@ -18,6 +18,8 @@ from typing import cast
 
 from ..config import PipelineConfig
 from ..config import load_pipeline_config
+from ..ddl import load_table_spec
+from ..ddl import render_tables
 from ..enums import FileFormat
 from ..extract import extract
 from ..file import File
@@ -38,6 +40,7 @@ __all__ = [
     'cmd_list',
     'cmd_load',
     'cmd_pipeline',
+    'cmd_render',
     'cmd_run',
     'cmd_transform',
     'cmd_validate',
@@ -47,6 +50,37 @@ __all__ = [
 # SECTION: INTERNAL FUNCTIONS =============================================== #
 
 
+def _collect_table_specs(
+    config_path: str | None,
+    spec_path: str | None,
+) -> list[dict[str, Any]]:
+    """
+    Load table schemas from a pipeline config and/or standalone spec.
+
+    Parameters
+    ----------
+    config_path : str | None
+        Path to a pipeline YAML config file.
+    spec_path : str | None
+        Path to a standalone table spec file.
+
+    Returns
+    -------
+    list[dict[str, Any]]
+        Collected table specification mappings.
+    """
+    specs: list[dict[str, Any]] = []
+
+    if spec_path:
+        specs.append(load_table_spec(Path(spec_path)))
+
+    if config_path:
+        cfg = load_pipeline_config(config_path, substitute=True)
+        specs.extend(getattr(cfg, 'table_schemas', []))
+
+    return specs
+
+
 def _emit_json(
     data: Any,
     *,
@@ -75,6 +109,23 @@ def _emit_json(
     print(dumped)
 
 
+def _explicit_cli_format(
+    args: argparse.Namespace,
+) -> str | None:
+    """Return the explicit CLI format hint when provided."""
+
+    if not getattr(args, '_format_explicit', False):
+        return None
+    for attr in ('format', 'target_format', 'source_format'):
+        value = getattr(args, attr, None)
+        if value is None:
+            continue
+        normalized = value.strip().lower()
+        if normalized:
+            return normalized
+    return None
+
+
 def _infer_payload_format(
     text: str,
 ) -> str:
@@ -134,23 +185,6 @@ def _list_sections(
     return sections
 
 
-def _explicit_cli_format(
-    args: argparse.Namespace,
-) -> str | None:
-    """Return the explicit CLI format hint when provided."""
-
-    if not getattr(args, '_format_explicit', False):
-        return None
-    for attr in ('format', 'target_format', 'source_format'):
-        value = getattr(args, attr, None)
-        if value is None:
-            continue
-        normalized = value.strip().lower()
-        if normalized:
-            return normalized
-    return None
-
-
 def _materialize_file_payload(
     source: object,
     *,
@@ -224,7 +258,6 @@ def _parse_text_payload(
     JSONData | str
         The parsed payload as JSON data or raw text.
     """
-
     effective = (fmt or '').strip().lower() or _infer_payload_format(text)
     if effective == 'json':
         return cast(JSONData, json_type(text))
@@ -265,7 +298,8 @@ def _pipeline_summary(
 def _presentation_flags(
     args: argparse.Namespace,
 ) -> tuple[bool, bool]:
-    """
+    """
+    Return presentation toggles from the parsed namespace.
 
     Parameters
     ----------
@@ -342,7 +376,6 @@ def _resolve_cli_payload(
        Parsed payload or the original source value when hydration is
        disabled.
     """
-
     if isinstance(source, (os.PathLike, str)) and str(source) == '-':
         text = _read_stdin_text()
         return _parse_text_payload(text, format_hint)
@@ -628,6 +661,67 @@ def cmd_pipeline(
     return 0
 
 
+def cmd_render(
+    args: argparse.Namespace,
+) -> int:
+    """Render SQL DDL statements from table schema specs."""
+
+    _pretty, quiet = _presentation_flags(args)
+
+    template_value = getattr(args, 'template', 'ddl') or 'ddl'
+    template_path = getattr(args, 'template_path', None)
+    table_filter = getattr(args, 'table', None)
+    spec_path = getattr(args, 'spec', None)
+    config_path = getattr(args, 'config', None)
+
+    # If the provided template points to a file, treat it as a path override.
+    file_override = template_path
+    template_key = template_value
+    if template_path is None:
+        candidate_path = Path(template_value)
+        if candidate_path.exists():
+            file_override = str(candidate_path)
+            template_key = None
+
+    specs = _collect_table_specs(config_path, spec_path)
+    if table_filter:
+        specs = [
+            spec
+            for spec in specs
+            if str(spec.get('table')) == table_filter
+            or str(spec.get('name', '')) == table_filter
+        ]
+
+    if not specs:
+        target_desc = table_filter or 'table_schemas'
+        print(
+            'No table schemas found for '
+            f'{target_desc}. Provide --spec or a pipeline --config with '
+            'table_schemas.',
+            file=sys.stderr,
+        )
+        return 1
+
+    rendered_chunks = render_tables(
+        specs,
+        template=template_key,
+        template_path=file_override,
+    )
+    sql_text = (
+        '\n'.join(chunk.rstrip() for chunk in rendered_chunks).rstrip() + '\n'
+    )
+
+    output_path = getattr(args, 'output', None)
+    if output_path and output_path != '-':
+        Path(output_path).write_text(sql_text, encoding='utf-8')
+        if not quiet:
+            print(f'Rendered {len(specs)} schema(s) to {output_path}')
+        return 0
+
+    print(sql_text)
+    return 0
+
+
 def cmd_list(args: argparse.Namespace) -> int:
     """
     Print requested pipeline sections from a YAML configuration.
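For use outside the CLI, the same rendering path can presumably be driven directly from Python. A minimal sketch that uses only the calls introduced in this diff (`load_table_spec` and `render_tables` from `etlplus.ddl`); the spec path is illustrative, and the joining logic mirrors `cmd_render` above.

```python
# Sketch: roughly what `etlplus render --spec schemas/customer.yml --template view`
# does, using only the etlplus.ddl calls shown in this diff (paths are illustrative).
from pathlib import Path

from etlplus.ddl import load_table_spec
from etlplus.ddl import render_tables

spec = load_table_spec(Path('schemas/customer.yml'))  # one table spec mapping
chunks = render_tables([spec], template='view', template_path=None)
sql_text = '\n'.join(chunk.rstrip() for chunk in chunks).rstrip() + '\n'
print(sql_text)
```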

{etlplus-0.4.9 → etlplus-0.5.2}/etlplus/cli/main.py

@@ -28,6 +28,7 @@ from .handlers import cmd_extract
 from .handlers import cmd_list
 from .handlers import cmd_load
 from .handlers import cmd_pipeline
+from .handlers import cmd_render
 from .handlers import cmd_run
 from .handlers import cmd_transform
 from .handlers import cmd_validate
@@ -441,6 +442,42 @@ def create_parser() -> argparse.ArgumentParser:
     )
     pipe_parser.set_defaults(func=cmd_pipeline)
 
+    render_parser = subparsers.add_parser(
+        'render',
+        help='Render SQL DDL from table schema specs',
+        formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+    )
+    render_parser.add_argument(
+        '--config',
+        help='Pipeline YAML containing table_schemas',
+    )
+    render_parser.add_argument(
+        '-o',
+        '--output',
+        help='Write SQL to this path (stdout when omitted)',
+    )
+    render_parser.add_argument(
+        '--spec',
+        help='Standalone table spec file (.yml/.yaml/.json)',
+    )
+    render_parser.add_argument(
+        '--table',
+        help='Render only the table matching this name',
+    )
+    render_parser.add_argument(
+        '--template',
+        default='ddl',
+        help='Template key (ddl/view) or path to a Jinja template file',
+    )
+    render_parser.add_argument(
+        '--template-path',
+        dest='template_path',
+        help=(
+            'Explicit path to a Jinja template file (overrides template key).'
+        ),
+    )
+    render_parser.set_defaults(func=cmd_render)
+
     list_parser = subparsers.add_parser(
         'list',
         help='List ETL pipeline metadata',
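The argparse parser above also wires up `--template-path`, which the README examples do not exercise. A hypothetical invocation (the template and output paths are illustrative):

```bash
# Override the built-in ddl/view template key with an explicit Jinja file.
etlplus render --config examples/configs/pipeline.yml \
  --template-path my_templates/custom_ddl.sql.j2 \
  -o temp/all_tables.sql
```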

{etlplus-0.4.9 → etlplus-0.5.2}/etlplus/config/pipeline.py

@@ -190,6 +190,8 @@ class PipelineConfig:
         Target connectors, parsed tolerantly.
     jobs : list[JobConfig]
         Job orchestration definitions.
+    table_schemas : list[dict[str, Any]]
+        Optional DDL-style table specifications used by the render command.
     """
 
     # -- Attributes -- #
@@ -208,6 +210,7 @@ class PipelineConfig:
     transforms: dict[str, dict[str, Any]] = field(default_factory=dict)
     targets: list[Connector] = field(default_factory=list)
     jobs: list[JobConfig] = field(default_factory=list)
+    table_schemas: list[dict[str, Any]] = field(default_factory=list)
 
     # -- Class Methods -- #
 
@@ -312,6 +315,13 @@ class PipelineConfig:
         # Jobs
         jobs = _build_jobs(raw)
 
+        # Table schemas (optional, tolerant pass-through structures).
+        table_schemas: list[dict[str, Any]] = []
+        for entry in raw.get('table_schemas', []) or []:
+            spec = maybe_mapping(entry)
+            if spec is not None:
+                table_schemas.append(dict(spec))
+
         return cls(
             name=name,
             version=version,
@@ -325,4 +335,5 @@ class PipelineConfig:
             transforms=transforms,
             targets=targets,
             jobs=jobs,
+            table_schemas=table_schemas,
         )