etlplus 0.5.2__py3-none-any.whl → 0.9.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
etlplus/enums.py CHANGED
@@ -208,8 +208,19 @@ class FileFormat(CoercibleStrEnum):
 
  # -- Constants -- #
 
+ AVRO = 'avro'
  CSV = 'csv'
+ FEATHER = 'feather'
+ GZ = 'gz'
  JSON = 'json'
+ NDJSON = 'ndjson'
+ ORC = 'orc'
+ PARQUET = 'parquet'
+ TSV = 'tsv'
+ TXT = 'txt'
+ XLS = 'xls'
+ XLSX = 'xlsx'
+ ZIP = 'zip'
  XML = 'xml'
  YAML = 'yaml'
 
@@ -227,11 +238,50 @@ class FileFormat(CoercibleStrEnum):
  """
  return {
  # Common shorthand
+ 'parq': 'parquet',
  'yml': 'yaml',
+ # File extensions
+ '.avro': 'avro',
+ '.csv': 'csv',
+ '.feather': 'feather',
+ '.gz': 'gz',
+ '.json': 'json',
+ '.jsonl': 'ndjson',
+ '.ndjson': 'ndjson',
+ '.orc': 'orc',
+ '.parquet': 'parquet',
+ '.pq': 'parquet',
+ '.tsv': 'tsv',
+ '.txt': 'txt',
+ '.xls': 'xls',
+ '.xlsx': 'xlsx',
+ '.zip': 'zip',
+ '.xml': 'xml',
+ '.yaml': 'yaml',
+ '.yml': 'yaml',
  # MIME types
- 'text/csv': 'csv',
+ 'application/avro': 'avro',
+ 'application/feather': 'feather',
+ 'application/gzip': 'gz',
  'application/json': 'json',
+ 'application/jsonlines': 'ndjson',
+ 'application/ndjson': 'ndjson',
+ 'application/orc': 'orc',
+ 'application/vnd.apache.arrow.file': 'feather',
+ 'application/vnd.apache.orc': 'orc',
+ 'application/vnd.ms-excel': 'xls',
+ (
+ 'application/vnd.openxmlformats-'
+ 'officedocument.spreadsheetml.sheet'
+ ): 'xlsx',
+ 'application/x-avro': 'avro',
+ 'application/x-ndjson': 'ndjson',
+ 'application/x-parquet': 'parquet',
  'application/xml': 'xml',
+ 'application/zip': 'zip',
+ 'text/csv': 'csv',
+ 'text/plain': 'txt',
+ 'text/tab-separated-values': 'tsv',
  }
 
 
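The expanded map above adds shorthand names, dotted file extensions, and MIME types as coercion aliases. A minimal sketch of the lookup pattern such a map supports, assuming a `coerce` classmethod and an inline alias dict that stand in for `CoercibleStrEnum`'s real machinery (illustrative names, not the package's actual API):

```python
from enum import StrEnum


class Format(StrEnum):
    CSV = 'csv'
    PARQUET = 'parquet'

    @classmethod
    def coerce(cls, value: str) -> 'Format':
        # Resolve shorthand, extension, and MIME aliases before falling
        # back to the normalized value itself.
        aliases = {'parq': 'parquet', '.pq': 'parquet', 'text/csv': 'csv'}
        key = value.strip().lower()
        return cls(aliases.get(key, key))


assert Format.coerce('.pq') is Format.PARQUET
assert Format.coerce('text/csv') is Format.CSV
```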
etlplus/load.py CHANGED
@@ -104,7 +104,7 @@ def load_data(
  return File(source, FileFormat.JSON).read_json()
 
  if isinstance(source, str):
- # Special case: '-' means read JSON from stdin (Unix convention).
+ # Special case: '-' means read JSON from STDIN (Unix convention).
  if source == '-':
  raw = sys.stdin.read()
  return _parse_json_string(raw)
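The reworded comment documents the `-` convention for STDIN input. A self-contained sketch of the same pattern, independent of ETLPlus internals (`read_source` is a hypothetical helper):

```python
import json
import sys


def read_source(source: str):
    # Mirror the convention above: '-' means read JSON from STDIN.
    if source == '-':
        return json.loads(sys.stdin.read())
    with open(source, encoding='utf-8') as fh:
        return json.load(fh)
```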
etlplus/run.py CHANGED
@@ -142,10 +142,8 @@ def run(
  """
  Run a pipeline job defined in a YAML configuration.
 
- This mirrors the run-mode logic from ``etlplus.cli.cmd_pipeline``
- (without the list/summary modes). By default it reads the configuration
- from ``in/pipeline.yml``, but callers can provide an explicit
- ``config_path`` to override this.
+ By default it reads the configuration from ``in/pipeline.yml``, but callers
+ can provide an explicit ``config_path`` to override this.
 
  Parameters
  ----------
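A usage sketch for the revised docstring, assuming `run()` accepts a `config_path` keyword as documented and a `job` keyword mirroring the CLI's `--job` flag (the latter is not confirmed by this diff):

```python
from etlplus.run import run

# config_path overrides the documented in/pipeline.yml default; the job
# name comes from the README examples. The 'job' keyword is an assumption.
run(config_path='examples/configs/pipeline.yml', job='file_to_file_customers')
```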
etlplus/types.py CHANGED
@@ -225,3 +225,8 @@ type Sleeper = Callable[[float], None]
 
  # Numeric timeout in seconds or ``None`` for no timeout.
  type Timeout = float | None
+
+ # -- Templates -- #
+
+ # Allowed template keys for bundled DDL rendering.
+ type TemplateKey = Literal['ddl', 'view']
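`TemplateKey` constrains template selection at type-check time. A short sketch of how such a `Literal` alias is typically consumed; `template_filename` is a hypothetical helper, though `ddl.sql.j2` and `view.sql.j2` do appear in the wheel's RECORD below:

```python
from typing import Literal

type TemplateKey = Literal['ddl', 'view']


def template_filename(key: TemplateKey) -> str:
    # Static type checkers reject any key outside the two allowed values.
    return f'{key}.sql.j2'


print(template_filename('ddl'))   # -> ddl.sql.j2
print(template_filename('view'))  # -> view.sql.j2
```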
etlplus/utils.py CHANGED
@@ -6,7 +6,6 @@ Small shared helpers used across modules.
 
  from __future__ import annotations
 
- import argparse
  import json
  from collections.abc import Callable
  from collections.abc import Mapping
@@ -22,7 +21,6 @@ from .types import StrAnyMap
  __all__ = [
  # Data utilities
  'count_records',
- 'json_type',
  'print_json',
  # Mapping utilities
  'cast_str_dict',
@@ -119,35 +117,6 @@ def count_records(
  return len(data) if isinstance(data, list) else 1
 
 
- def json_type(
- option: str,
- ) -> Any:
- """
- Argparse ``type=`` hook that parses a JSON string.
-
- Parameters
- ----------
- option : str
- Raw CLI string to parse as JSON.
-
- Returns
- -------
- Any
- Parsed JSON value.
-
- Raises
- ------
- argparse.ArgumentTypeError
- If the input cannot be parsed as JSON.
- """
- try:
- return json.loads(option)
- except json.JSONDecodeError as e: # pragma: no cover - argparse path
- raise argparse.ArgumentTypeError(
- f'Invalid JSON: {e.msg} (pos {e.pos})',
- ) from e
-
-
  def maybe_mapping(
  value: Any,
  ) -> StrAnyMap | None:
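With the argparse hook removed, JSON option parsing presumably lives in the Typer-based CLI instead. A hedged sketch of an equivalent helper using `typer.BadParameter` (this exact function is not shown in the diff):

```python
import json

import typer


def json_option(value: str):
    """Parse a raw CLI string as JSON, raising a Click-friendly error."""
    try:
        return json.loads(value)
    except json.JSONDecodeError as e:
        raise typer.BadParameter(f'Invalid JSON: {e.msg} (pos {e.pos})') from e
```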
@@ -181,7 +150,7 @@ def print_json(
  Returns
  -------
  None
- This helper writes directly to ``stdout``.
+ This helper writes directly to STDOUT.
  """
  print(json.dumps(obj, indent=2, ensure_ascii=False))
 
etlplus-0.5.2.dist-info/METADATA → etlplus-0.9.1.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: etlplus
- Version: 0.5.2
+ Version: 0.9.1
  Summary: A Swiss Army knife for simple ETL operations
  Home-page: https://github.com/Dagitali/ETLPlus
  Author: ETLPlus Team
@@ -21,7 +21,10 @@ Requires-Dist: jinja2>=3.1.6
  Requires-Dist: pyodbc>=5.3.0
  Requires-Dist: python-dotenv>=1.2.1
  Requires-Dist: pandas>=2.3.3
+ Requires-Dist: pydantic>=2.12.5
+ Requires-Dist: PyYAML>=6.0.3
  Requires-Dist: requests>=2.32.5
+ Requires-Dist: SQLAlchemy>=2.0.45
  Requires-Dist: typer>=0.21.0
  Provides-Extra: dev
  Requires-Dist: black>=25.9.0; extra == "dev"
@@ -61,7 +64,8 @@ package and command-line interface for data extraction, validation, transformati
  - [Quickstart](#quickstart)
  - [Usage](#usage)
  - [Command Line Interface](#command-line-interface)
- - [Inspect Pipelines](#inspect-pipelines)
+ - [Argument Order and Required Options](#argument-order-and-required-options)
+ - [Check Pipelines](#check-pipelines)
  - [Render SQL DDL](#render-sql-ddl)
  - [Extract Data](#extract-data)
  - [Validate Data](#validate-data)
@@ -90,6 +94,14 @@ package and command-line interface for data extraction, validation, transformati
 
  ## Features
 
+ - **Check** data pipeline definitions before running them:
+ - Summarize jobs, sources, targets, and transforms
+ - Confirm configuration changes by printing focused sections on demand
+
+ - **Render** SQL DDL from shared table specs:
+ - Generate CREATE TABLE or view statements
+ - Swap templates or direct output to files for database migrations
+
  - **Extract** data from multiple sources:
  - Files (CSV, JSON, XML, YAML)
  - Databases (connection string support)
@@ -140,8 +152,8 @@ etlplus --version
 
  # One-liner: extract CSV, filter, select, and write JSON
  etlplus extract file examples/data/sample.csv \
- | etlplus transform - --operations '{"filter": {"field": "age", "op": "gt", "value": 25}, "select": ["name", "email"]}' \
- -o temp/sample_output.json
+ | etlplus transform --operations '{"filter": {"field": "age", "op": "gt", "value": 25}, "select": ["name", "email"]}' \
+ - temp/sample_output.json
  ```
 
  [Python API](#python-api):
@@ -171,21 +183,45 @@ etlplus --help
  etlplus --version
  ```
 
- #### Inspect Pipelines
+ The CLI is implemented with Typer (Click-based). There is no argparse compatibility layer, so rely
+ on the documented commands/flags and run `etlplus <command> --help` for current options.
+
+ **Example error messages:**
+
+ - If you omit a required argument: `Error: Missing required argument 'SOURCE'.`
+ - If you place an option before its argument: `Error: Option '--source-format' must follow the 'SOURCE' argument.`
+
+ #### Argument Order and Required Options
 
- Use `etlplus list` to explore pipeline YAML definitions without running them. The command can print
+ For each command, positional arguments must precede options. Required options must follow their
+ associated argument:
+
+ - **extract**: `etlplus extract SOURCE [--source-format ...] [--source-type ...]`
+ - `SOURCE` is required. `--source-format` and `--source-type` must follow `SOURCE`.
+ - **transform**: `etlplus transform [--operations ...] SOURCE [--source-format ...] [--source-type ...] TARGET [--target-format ...] [--target-type ...]`
+ - `SOURCE` and `TARGET` are required. Format/type options must follow their respective argument.
+ - **load**: `etlplus load TARGET [--target-format ...] [--target-type ...] [--source-format ...]`
+ - `TARGET` is required. `--target-format` and `--target-type` must follow `TARGET`.
+ - **validate**: `etlplus validate SOURCE [--rules ...] [--source-format ...] [--source-type ...]`
+ - `SOURCE` is required. `--rules` and format/type options must follow `SOURCE`.
+
+ If required arguments or options are missing, or if options are placed before their associated argument, the CLI will display a clear error message.
+
+ #### Check Pipelines
+
+ Use `etlplus check` to explore pipeline YAML definitions without running them. The command can print
  job names, summarize configured sources and targets, or drill into specific sections.
 
  List jobs and show a pipeline summary:
  ```bash
- etlplus list --config examples/configs/pipeline.yml --jobs
- etlplus list --config examples/configs/pipeline.yml --summary
+ etlplus check --config examples/configs/pipeline.yml --jobs
+ etlplus check --config examples/configs/pipeline.yml --summary
  ```
 
  Show sources or transforms for troubleshooting:
  ```bash
- etlplus list --config examples/configs/pipeline.yml --sources
- etlplus list --config examples/configs/pipeline.yml --transforms
+ etlplus check --config examples/configs/pipeline.yml --sources
+ etlplus check --config examples/configs/pipeline.yml --transforms
  ```
 
  #### Render SQL DDL
@@ -237,7 +273,7 @@ etlplus extract api https://api.example.com/data
 
  Save extracted data to file:
  ```bash
- etlplus extract file examples/data/sample.csv -o temp/sample_output.json
+ etlplus extract file examples/data/sample.csv > temp/sample_output.json
  ```
 
  #### Validate Data
@@ -256,59 +292,67 @@ etlplus validate examples/data/sample.json --rules '{"email": {"type": "string",
 
  When piping data through `etlplus transform`, use `--source-format` whenever the SOURCE argument is
  `-` or a literal payload, mirroring the `etlplus extract` semantics. Use `--target-format` to
- control the emitted format for stdout or other non-file outputs, just like `etlplus load`. File
- paths continue to infer formats from their extensions. Use `--from` to override the inferred source
- connector type and `--to` to override the inferred target connector type, matching the `etlplus
- extract`/`etlplus load` behavior.
+ control the emitted format for STDOUT or other non-file outputs, just like `etlplus load`. File
+ paths continue to infer formats from their extensions. Use `--source-type` to override the inferred
+ source connector type and `--target-type` to override the inferred target connector type, matching
+ the `etlplus extract`/`etlplus load` behavior.
 
  Transform file inputs while overriding connector types:
  ```bash
- etlplus transform --from file examples/data/sample.json \
+ etlplus transform \
  --operations '{"select": ["name", "email"]}' \
- --to file -o temp/selected_output.json
+ examples/data/sample.json --source-type file \
+ temp/selected_output.json --target-type file
  ```
 
  Filter and select fields:
  ```bash
- etlplus transform '[{"name": "John", "age": 30}, {"name": "Jane", "age": 25}]' \
- --operations '{"filter": {"field": "age", "op": "gt", "value": 26}, "select": ["name"]}'
+ etlplus transform \
+ --operations '{"filter": {"field": "age", "op": "gt", "value": 26}, "select": ["name"]}' \
+ '[{"name": "John", "age": 30}, {"name": "Jane", "age": 25}]'
  ```
 
  Sort data:
  ```bash
- etlplus transform examples/data/sample.json --operations '{"sort": {"field": "age", "reverse": true}}'
+ etlplus transform \
+ --operations '{"sort": {"field": "age", "reverse": true}}' \
+ examples/data/sample.json
  ```
 
  Aggregate data:
  ```bash
- etlplus transform examples/data/sample.json --operations '{"aggregate": {"field": "age", "func": "sum"}}'
+ etlplus transform \
+ --operations '{"aggregate": {"field": "age", "func": "sum"}}' \
+ examples/data/sample.json
  ```
 
  Map/rename fields:
  ```bash
- etlplus transform examples/data/sample.json --operations '{"map": {"name": "new_name"}}'
+ etlplus transform \
+ --operations '{"map": {"name": "new_name"}}' \
+ examples/data/sample.json
  ```
 
  #### Load Data
 
- `etlplus load` consumes JSON from stdin; provide only the target argument plus optional flags.
+ `etlplus load` consumes JSON from STDIN; provide only the target argument plus optional flags.
 
  Load to JSON file:
  ```bash
  etlplus extract file examples/data/sample.json \
- | etlplus load --to file temp/sample_output.json
+ | etlplus load temp/sample_output.json --target-type file
  ```
 
  Load to CSV file:
  ```bash
  etlplus extract file examples/data/sample.csv \
- | etlplus load --to file temp/sample_output.csv
+ | etlplus load temp/sample_output.csv --target-type file
  ```
 
  Load to REST API:
  ```bash
  cat examples/data/sample.json \
- | etlplus load --to api https://api.example.com/endpoint
+ | etlplus load https://api.example.com/endpoint --target-type api
  ```
 
  ### Python API
@@ -350,33 +394,33 @@ CLI quick reference for pipelines:
 
  ```bash
  # List jobs or show a pipeline summary
- etlplus list --config examples/configs/pipeline.yml --jobs
- etlplus list --config examples/configs/pipeline.yml --summary
+ etlplus check --config examples/configs/pipeline.yml --jobs
+ etlplus check --config examples/configs/pipeline.yml --summary
 
  # Run a job
  etlplus run --config examples/configs/pipeline.yml --job file_to_file_customers
-
- # Deprecated shim (will be removed): etlplus pipeline
  ```
 
  ### Complete ETL Pipeline Example
 
  ```bash
  # 1. Extract from CSV
- etlplus extract file examples/data/sample.csv -o temp/sample_extracted.json
+ etlplus extract file examples/data/sample.csv > temp/sample_extracted.json
 
  # 2. Transform (filter and select fields)
- etlplus transform temp/sample_extracted.json \
+ etlplus transform \
  --operations '{"filter": {"field": "age", "op": "gt", "value": 25}, "select": ["name", "email"]}' \
- -o temp/sample_transformed.json
+ temp/sample_extracted.json \
+ temp/sample_transformed.json
 
  # 3. Validate transformed data
- etlplus validate temp/sample_transformed.json \
- --rules '{"name": {"type": "string", "required": true}, "email": {"type": "string", "required": true}}'
+ etlplus validate \
+ --rules '{"name": {"type": "string", "required": true}, "email": {"type": "string", "required": true}}' \
+ temp/sample_transformed.json
 
  # 4. Load to CSV
  cat temp/sample_transformed.json \
- | etlplus load --to temp/sample_output.csv
+ | etlplus load temp/sample_output.csv
  ```
 
  ### Format Overrides
@@ -389,14 +433,14 @@ Examples (zsh):
 
  ```zsh
  # Force CSV parsing for an extension-less file
- etlplus extract --from file data.txt --source-format csv
+ etlplus extract data.txt --source-type file --source-format csv
 
  # Write CSV to a file without the .csv suffix
- etlplus load --to file output.bin --target-format csv < data.json
+ etlplus load output.bin --target-type file --target-format csv < data.json
 
  # Leave the flags off when extensions already match the desired format
- etlplus extract --from file data.csv
- etlplus load --to file data.json < data.json
+ etlplus extract data.csv --source-type file
+ etlplus load data.json --target-type file < data.json
  ```
 
  ## Transformation Operations
etlplus-0.5.2.dist-info/RECORD → etlplus-0.9.1.dist-info/RECORD CHANGED
@@ -1,20 +1,19 @@
  etlplus/__init__.py,sha256=M2gScnyir6WOMAh_EuoQIiAzdcTls0_5hbd_Q6of8I0,1021
  etlplus/__main__.py,sha256=btoROneNiigyfBU7BSzPKZ1R9gzBMpxcpsbPwmuHwTM,479
  etlplus/__version__.py,sha256=1E0GMK_yUWCMQFKxXjTvyMwofi0qT2k4CDNiHWiymWE,327
- etlplus/ddl.py,sha256=uYkiMTx1uDlUypnXCYy0K5ARnHRMHFVzzg8PizBQRLg,5306
- etlplus/enums.py,sha256=V_j18Ud2BCXpFsBk2pZGrvCVrvAMJ7uja1z9fppFGso,10175
+ etlplus/enums.py,sha256=kWDXOOhyYodhCxXDgQ_gP7000nO2i0kwpve8AUkr77k,11763
  etlplus/extract.py,sha256=f44JdHhNTACxgn44USx05paKTwq7LQY-V4wANCW9hVM,6173
  etlplus/file.py,sha256=RxIAsGDN4f_vNA2B5-ct88JNd_ISAyYbooIRE5DstS8,17972
- etlplus/load.py,sha256=BwF3gT4gIr-5CvNMz_aLTCl-w2ihWSTxNVd4X92XFwI,8737
+ etlplus/load.py,sha256=R_y0_vtsEo1bwxWVQu2bfhB5ZIJoIoWu2ycCdvY4RnE,8737
  etlplus/mixins.py,sha256=ifGpHwWv7U00yqGf-kN93vJax2IiK4jaGtTsPsO3Oak,1350
  etlplus/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- etlplus/run.py,sha256=zl_Yx35spcgaa9Xx7-kcJEb1CAYyMIiqtPlsSrYxRfs,12448
+ etlplus/run.py,sha256=X4kp5FQlIWVf1_d9oSrchKau7BFDCE1Zkscvu7WPaWw,12340
  etlplus/run_helpers.py,sha256=bj6MkaeFxjl3CeKG1HoXKx5DwAlXNERVW-GX-z1P_qQ,24373
  etlplus/transform.py,sha256=uAUVDDHYCgx7GpVez9IK3OAZM-CnCuMa9iox3vwGGJA,25296
- etlplus/types.py,sha256=SJiZ7wJiSnV4CEvF-9E5nSFLBo4DT9OqHQqj1GSHkv8,6042
- etlplus/utils.py,sha256=_fn8b-SAdxiw28VX-Ugr8sZUPZI9mEkWKAGExlgxhJA,13993
+ etlplus/types.py,sha256=1hsDlnF6r76zAwaUYay-i6pCM-Y0IU5nP7Crj8PLCQ4,6157
+ etlplus/utils.py,sha256=BMLTWAvCJj3zLEcffBgURYnu0UGhhXsfH2WWpAt7fV8,13363
  etlplus/validate.py,sha256=7rJoEI_SIILdPpoBqqh2UJqg9oeReDz34mYSlc3t7Qg,12989
- etlplus/api/README.md,sha256=UkK5PiZWXbbnMNP0MaPa56S88PjSqOwhMNCyswOhvKc,7329
+ etlplus/api/README.md,sha256=ZiyjxLz0LfFCzeYKXwtH8yY1OJ4hXCju7t2ICroFoU8,7215
  etlplus/api/__init__.py,sha256=P2JUYFy6Ep4t6xnsBiCBfQCkQLHYYhA-yXPXCobS8Y0,4295
  etlplus/api/auth.py,sha256=GOO5on-LoMS1GXTAhtK9rFcfpjbBcNeA6NE5UZwIq0g,12158
  etlplus/api/config.py,sha256=wRpOaZ31sPReVzEMme0jKl_37nqgraESwuYSNxP_xDo,17397
@@ -32,9 +31,14 @@ etlplus/api/rate_limiting/__init__.py,sha256=ZySB1dZettEDnWvI1EHf_TZ9L08M_kKsNR-
  etlplus/api/rate_limiting/config.py,sha256=2b4wIynblN-1EyMqI4aXa71SljzSjXYh5N1Nngr3jOg,9406
  etlplus/api/rate_limiting/rate_limiter.py,sha256=Uxozqd_Ej5Lsj-M-mLT2WexChgWh7x35_YP10yqYPQA,7159
  etlplus/cli/__init__.py,sha256=J97-Rv931IL1_b4AXnB7Fbbd7HKnHBpx18NQfC_kE6c,299
- etlplus/cli/app.py,sha256=buGIIoSIu5cxbYTdPcA_iaxJaPG-eHj-LPD9OgZ0h9w,35824
- etlplus/cli/handlers.py,sha256=O7Mh9nowdMCzaV36KASWZVC4fNMEg9xnVZXE7NHW6P8,18873
- etlplus/cli/main.py,sha256=5qWAKqlRtnb4VEpBfGT45q-LBxi_2hSMnw23jNyYA_Q,16497
+ etlplus/cli/commands.py,sha256=BK2qmFsser6AXOgEvpiadrYMIiwviAzqkSxMlBhRXRw,24670
+ etlplus/cli/constants.py,sha256=KIZj7J2tNf5mJbkqAdZmu5FXYW2FQmxwgeOKWc3-3Hg,1944
+ etlplus/cli/handlers.py,sha256=K0GazvrPgocJ-63HZqF0xhyJk8TB1Gcj-eIbWltXKRU,17759
+ etlplus/cli/io.py,sha256=7sldiZz4-Geomge5IO_XYykXPa6UiORfUWzLCdQePG8,7846
+ etlplus/cli/main.py,sha256=IgeqxypixfwLHR-QcpgVMQ7vMZ865bXOh2oO9v-BWeM,5234
+ etlplus/cli/options.py,sha256=vfXT3YLh7wG1iC-aTdSg6ItMC8l6n0Lozmy53XjqLbA,1199
+ etlplus/cli/state.py,sha256=Pfd8ru0wYIN7eGp1_A0tioqs1LiCDZCuJ6AnjZb6yYQ,8027
+ etlplus/cli/types.py,sha256=tclhKVJXDqHzlTQBYKARfqMgDOcuBJ-Zej2pvFy96WM,652
  etlplus/config/__init__.py,sha256=VZWzOg7d2YR9NT6UwKTv44yf2FRUMjTHynkm1Dl5Qzo,1486
  etlplus/config/connector.py,sha256=0-TIwevHbKRHVmucvyGpPd-3tB1dKHB-dj0yJ6kq5eY,9809
  etlplus/config/jobs.py,sha256=hmzRCqt0OvCEZZR4ONKrd3lvSv0OmayjLc4yOBk3ug8,7399
@@ -42,14 +46,20 @@ etlplus/config/pipeline.py,sha256=Va4MQY6KEyKqHGMKPmh09ZcGpx95br-iNUjpkqtzVbw,95
  etlplus/config/profile.py,sha256=Ss2zedQGjkaGSpvBLTD4SZaWViMJ7TJPLB8Q2_BTpPg,1898
  etlplus/config/types.py,sha256=a0epJ3z16HQ5bY3Ctf8s_cQPa3f0HHcwdOcjCP2xoG4,4954
  etlplus/config/utils.py,sha256=4SUHMkt5bKBhMhiJm-DrnmE2Q4TfOgdNCKz8PJDS27o,3443
+ etlplus/database/__init__.py,sha256=AKJsDl2RHuRGPS-eXgNJeh4aSncJP5Y0yLApBF6i7i8,1052
+ etlplus/database/ddl.py,sha256=z9KvHi1MPhPBLHxMDdqJgLTp3A2-lcz0gqhZ7HIE6kU,7916
+ etlplus/database/engine.py,sha256=7rr7YndA8LwyWJL8k1YhQbqxxmW4gWEUQjp0NwQcYtc,4061
+ etlplus/database/orm.py,sha256=gCSqH-CjQz6tV9133-VqgiwokK5ylun0BwXaIWfImAo,10008
+ etlplus/database/schema.py,sha256=HNTgglI8qvQLInr7gq--2lLmLKHzAZTL2MJUOIw9DlY,7025
+ etlplus/database/types.py,sha256=_pkQyC14TzAlgyeIqZG4F5LWYknZbHw3TW68Auk7Ya0,795
  etlplus/templates/__init__.py,sha256=tsniN7XJYs3NwYxJ6c2HD5upHP3CDkLx-bQCMt97UOM,106
  etlplus/templates/ddl.sql.j2,sha256=s8fMWvcb4eaJVXkifuib1aQPljtZ8buuyB_uA-ZdU3Q,4734
  etlplus/templates/view.sql.j2,sha256=Iy8DHfhq5yyvrUKDxqp_aHIEXY4Tm6j4wT7YDEFWAhk,2180
  etlplus/validation/__init__.py,sha256=Pe5Xg1_EA4uiNZGYu5WTF3j7odjmyxnAJ8rcioaplSQ,1254
  etlplus/validation/utils.py,sha256=Mtqg449VIke0ziy_wd2r6yrwJzQkA1iulZC87FzXMjo,10201
- etlplus-0.5.2.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
- etlplus-0.5.2.dist-info/METADATA,sha256=ow6T-Op0DnqalPB2eMLgaJ0s-a3WQUJ4wBQs_HxrQ9k,18936
- etlplus-0.5.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- etlplus-0.5.2.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
- etlplus-0.5.2.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
- etlplus-0.5.2.dist-info/RECORD,,
+ etlplus-0.9.1.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
+ etlplus-0.9.1.dist-info/METADATA,sha256=FuV00vTR_UMB8DEjeUg9SWIxYU4EibsWmsCbra_1fmY,21035
+ etlplus-0.9.1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ etlplus-0.9.1.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
+ etlplus-0.9.1.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
+ etlplus-0.9.1.dist-info/RECORD,,