etlplus 0.3.25__py3-none-any.whl → 0.4.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
etlplus/cli/main.py ADDED
@@ -0,0 +1,461 @@
+ """
+ :mod:`etlplus.cli.main` module.
+
+ Entry point helpers for the Typer-powered ``etlplus`` CLI.
+
+ This module exposes :func:`main` for the console script as well as
+ :func:`create_parser` for callers that still need an ``argparse`` parser.
+ """
+
+ from __future__ import annotations
+
+ import argparse
+ import sys
+ from collections.abc import Sequence
+ from typing import Literal
+
+ import typer
+
+ from .. import __version__
+ from ..enums import DataConnectorType
+ from ..enums import FileFormat
+ from ..utils import json_type
+ from .app import PROJECT_URL
+ from .app import app
+ from .handlers import cmd_extract
+ from .handlers import cmd_list
+ from .handlers import cmd_load
+ from .handlers import cmd_pipeline
+ from .handlers import cmd_run
+ from .handlers import cmd_transform
+ from .handlers import cmd_validate
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     # Functions
+     'create_parser',
+     'main',
+ ]
+
+
+ # SECTION: TYPE ALIASES ===================================================== #
+
+
+ type FormatContext = Literal['source', 'target']
+
+
+ # SECTION: INTERNAL CLASSES ================================================= #
+
+
+ class _FormatAction(argparse.Action):
+     """
+     Argparse action that records when ``--source-format`` or
+     ``--target-format`` is provided."""
+
+     def __call__(
+         self,
+         parser: argparse.ArgumentParser,
+         namespace: argparse.Namespace,
+         values: str | Sequence[object] | None,
+         option_string: str | None = None,
+     ) -> None:  # pragma: no cover
+         setattr(namespace, self.dest, values)
+         namespace._format_explicit = True
+
+
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+ def _add_config_option(
+     parser: argparse.ArgumentParser,
+     *,
+     required: bool = True,
+ ) -> None:
+     """Attach the shared ``--config`` option used by legacy commands.
+
+     Parameters
+     ----------
+     parser : argparse.ArgumentParser
+         Parser receiving the option.
+     required : bool, optional
+         Whether the flag must be provided. Defaults to ``True``.
+     """
+
+     parser.add_argument(
+         '--config',
+         required=required,
+         help='Path to pipeline YAML configuration file',
+     )
+
+
+ def _add_format_options(
+     parser: argparse.ArgumentParser,
+     *,
+     context: FormatContext,
+ ) -> None:
+     """
+     Attach shared ``--source-format`` or ``--target-format`` options to
+     extract/load parsers.
+
+     Parameters
+     ----------
+     parser : argparse.ArgumentParser
+         Parser to augment.
+     context : FormatContext
+         Context for the format option: either ``'source'`` or ``'target'``
+     """
+     parser.set_defaults(_format_explicit=False)
+     parser.add_argument(
+         '--source-format',
+         choices=list(FileFormat.choices()),
+         default='json',
+         action=_FormatAction,
+         help=(
+             f'Format of the {context}. Overrides filename-based inference '
+             'when provided.'
+         ),
+     )
+     parser.add_argument(
+         '--target-format',
+         choices=list(FileFormat.choices()),
+         default='json',
+         action=_FormatAction,
+         help=(
+             f'Format of the {context}. Overrides filename-based inference '
+             'when provided.'
+         ),
+     )
+
+
+ def _add_boolean_flag(
+     parser: argparse.ArgumentParser,
+     *,
+     name: str,
+     help_text: str,
+ ) -> None:
+     """Add a toggle that also supports the ``--no-`` prefix via 3.13.
+
+     Parameters
+     ----------
+     parser : argparse.ArgumentParser
+         Parser receiving the flag.
+     name : str
+         Primary flag name without leading dashes.
+     help_text : str
+         Help text rendered in ``--help`` output.
+     """
+
+     parser.add_argument(
+         f'--{name}',
+         action=argparse.BooleanOptionalAction,
+         default=False,
+         help=help_text,
+     )
+
+
+ def _cli_description() -> str:
+     return '\n'.join(
+         [
+             'ETLPlus - A Swiss Army knife for simple ETL operations.',
+             '',
+             ' Provide a subcommand and options. Examples:',
+             '',
+             ' etlplus extract file in.csv > out.json',
+             ' etlplus validate in.json --rules \'{"required": ["id"]}\'',
+             (
+                 ' etlplus transform --from file in.csv --operations '
+                 '\'{"select": ["id"]}\' --to file -o out.json'
+             ),
+             ' etlplus extract in.csv | etlplus load --to file out.json',
+             '',
+             ' Override format inference when extensions are misleading:',
+             '',
+             ' etlplus extract data.txt --source-format csv',
+             ' etlplus load payload.bin --target-format json',
+         ],
+     )
+
+
+ def _cli_epilog() -> str:
+     return '\n'.join(
+         [
+             'Tip:',
+             ' --source-format and --target-format override format '
+             'inference based on filename extensions when needed.',
+         ],
+     )
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def create_parser() -> argparse.ArgumentParser:
+     """
+     Return the legacy :mod:`argparse` parser wired to current handlers.
+
+     Returns
+     -------
+     argparse.ArgumentParser
+         Parser compatible with historical ``etlplus`` entry points.
+     """
+
+     parser = argparse.ArgumentParser(
+         prog='etlplus',
+         description=_cli_description(),
+         epilog=_cli_epilog(),
+         formatter_class=argparse.RawDescriptionHelpFormatter,
+     )
+
+     parser.add_argument(
+         '-V',
+         '--version',
+         action='version',
+         version=f'%(prog)s {__version__}',
+     )
+
+     subparsers = parser.add_subparsers(
+         dest='command',
+         help='Available commands',
+     )
+     subparsers.required = True
+
+     extract_parser = subparsers.add_parser(
+         'extract',
+         help='Extract data from sources (files, databases, REST APIs)',
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     extract_parser.add_argument(
+         'source_type',
+         choices=list(DataConnectorType.choices()),
+         help='Type of source to extract from',
+     )
+     extract_parser.add_argument(
+         'source',
+         help=(
+             'Source location (file path, database connection string, '
+             'or API URL)'
+         ),
+     )
+     _add_format_options(extract_parser, context='source')
+     extract_parser.set_defaults(func=cmd_extract)
+
+     validate_parser = subparsers.add_parser(
+         'validate',
+         help='Validate data from sources',
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     validate_parser.add_argument(
+         'source',
+         help='Data source to validate (file path or JSON string)',
+     )
+     validate_parser.add_argument(
+         '--rules',
+         type=json_type,
+         default={},
+         help='Validation rules as JSON string',
+     )
+     validate_parser.set_defaults(func=cmd_validate)
+
+     transform_parser = subparsers.add_parser(
+         'transform',
+         help='Transform data',
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     transform_parser.add_argument(
+         'source',
+         help='Data source to transform (file path or JSON string)',
+     )
+     transform_parser.add_argument(
+         '--operations',
+         type=json_type,
+         default={},
+         help='Transformation operations as JSON string',
+     )
+     transform_parser.add_argument(
+         '--from',
+         dest='from_',
+         choices=list(DataConnectorType.choices()),
+         help='Override the inferred source type (file, database, api).',
+     )
+     transform_parser.add_argument(
+         '--to',
+         dest='to',
+         choices=list(DataConnectorType.choices()),
+         help='Override the inferred target type (file, database, api).',
+     )
+     transform_parser.add_argument(
+         '--source-format',
+         choices=list(FileFormat.choices()),
+         dest='source_format',
+         help=(
+             'Input payload format when SOURCE is - or a literal payload. '
+             'File sources infer format from the extension.'
+         ),
+     )
+     transform_parser.add_argument(
+         '--target-format',
+         dest='target_format',
+         choices=list(FileFormat.choices()),
+         help=(
+             'Output payload format '
+             'when writing to stdout or non-file targets. '
+             'File targets infer format from the extension.'
+         ),
+     )
+     transform_parser.set_defaults(func=cmd_transform)
+
+     load_parser = subparsers.add_parser(
+         'load',
+         help='Load data to targets (files, databases, REST APIs)',
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     load_parser.add_argument(
+         'source',
+         help='Data source to load (file path or JSON string)',
+     )
+     load_parser.add_argument(
+         'target_type',
+         choices=list(DataConnectorType.choices()),
+         help='Type of target to load to',
+     )
+     load_parser.add_argument(
+         'target',
+         help=(
+             'Target location (file path, database connection string, '
+             'or API URL)'
+         ),
+     )
+     _add_format_options(load_parser, context='target')
+     load_parser.set_defaults(func=cmd_load)
+
+     pipe_parser = subparsers.add_parser(
+         'pipeline',
+         help=(
+             'Inspect or run pipeline YAML (see '
+             f'{PROJECT_URL}/blob/main/docs/pipeline-guide.md)'
+         ),
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     _add_config_option(pipe_parser)
+     pipe_parser.add_argument(
+         '--list',
+         action='store_true',
+         help='List available job names and exit',
+     )
+     pipe_parser.add_argument(
+         '--run',
+         metavar='JOB',
+         help='Run a specific job by name',
+     )
+     pipe_parser.set_defaults(func=cmd_pipeline)
+
+     list_parser = subparsers.add_parser(
+         'list',
+         help='List ETL pipeline metadata',
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     _add_config_option(list_parser)
+     _add_boolean_flag(
+         list_parser,
+         name='pipelines',
+         help_text='List ETL pipelines',
+     )
+     _add_boolean_flag(
+         list_parser,
+         name='sources',
+         help_text='List data sources',
+     )
+     _add_boolean_flag(
+         list_parser,
+         name='targets',
+         help_text='List data targets',
+     )
+     _add_boolean_flag(
+         list_parser,
+         name='transforms',
+         help_text='List data transforms',
+     )
+     list_parser.set_defaults(func=cmd_list)
+
+     run_parser = subparsers.add_parser(
+         'run',
+         help=(
+             'Run an ETL pipeline '
+             f'(see {PROJECT_URL}/blob/main/docs/run-module.md)'
+         ),
+         formatter_class=argparse.ArgumentDefaultsHelpFormatter,
+     )
+     _add_config_option(run_parser)
+     run_parser.add_argument(
+         '-j',
+         '--job',
+         help='Name of the job to run',
+     )
+     run_parser.add_argument(
+         '-p',
+         '--pipeline',
+         help='Name of the pipeline to run',
+     )
+     run_parser.set_defaults(func=cmd_run)
+
+     return parser
+
+
+ def main(
+     argv: list[str] | None = None,
+ ) -> int:
+     """
+     Run the Typer-powered CLI and normalize exit codes.
+
+     Parameters
+     ----------
+     argv : list[str] | None, optional
+         Sequence of command-line arguments excluding the program name. When
+         ``None``, defaults to ``sys.argv[1:]``.
+
+     Returns
+     -------
+     int
+         A conventional POSIX exit code: zero on success, non-zero on error.
+
+     Raises
+     ------
+     SystemExit
+         Re-raises SystemExit exceptions to preserve exit codes.
+
+     Notes
+     -----
+     This function uses Typer (Click) for parsing/dispatch, but preserves the
+     existing `cmd_*` handlers by adapting parsed arguments into an
+     :class:`argparse.Namespace`.
+     """
+     resolved_argv = sys.argv[1:] if argv is None else list(argv)
+     command = typer.main.get_command(app)
+
+     try:
+         result = command.main(
+             args=resolved_argv,
+             prog_name='etlplus',
+             standalone_mode=False,
+         )
+         return int(result or 0)
+
+     except typer.Exit as exc:
+         return int(exc.exit_code)
+
+     except typer.Abort:
+         return 1
+
+     except KeyboardInterrupt:  # pragma: no cover - interactive path
+         # Conventional exit code for SIGINT
+         return 130
+
+     except SystemExit as e:
+         print(f'Error: {e}', file=sys.stderr)
+         raise e
+
+     except (OSError, TypeError, ValueError) as e:
+         print(f'Error: {e}', file=sys.stderr)
+         return 1
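
For orientation, here is a hedged usage sketch of the two helpers this new module exports. It is not part of the diff or the wheel: the `extract file examples/data/sample.json` arguments mirror the README examples further down, and the console-script pattern at the end is simply the conventional way to wire up a `main()` that returns an exit code instead of raising `SystemExit` itself.

```python
"""Usage sketch for etlplus.cli.main (assumes etlplus 0.4.6 is installed)."""

import sys

from etlplus.cli.main import create_parser, main

# Legacy path: build the argparse parser and parse a command line exactly as the
# historical entry point did. The arguments mirror the README examples below.
parser = create_parser()
args = parser.parse_args(['extract', 'file', 'examples/data/sample.json'])
print(args.command, args.source_type, args.source)  # -> extract file examples/data/sample.json

# Console-script path: main() dispatches through Typer and returns a POSIX-style
# exit code (0 on success, 1 for handled errors, 130 on Ctrl-C).
if __name__ == '__main__':
    sys.exit(main())
```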
etlplus-0.3.25.dist-info/METADATA → etlplus-0.4.6.dist-info/METADATA
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: etlplus
- Version: 0.3.25
+ Version: 0.4.6
  Summary: A Swiss Army knife for simple ETL operations
  Home-page: https://github.com/Dagitali/ETLPlus
  Author: ETLPlus Team
@@ -22,6 +22,7 @@ Requires-Dist: pyodbc>=5.3.0
  Requires-Dist: python-dotenv>=1.2.1
  Requires-Dist: pandas>=2.3.3
  Requires-Dist: requests>=2.32.5
+ Requires-Dist: typer>=0.21.0
  Provides-Extra: dev
  Requires-Dist: black>=25.9.0; extra == "dev"
  Requires-Dist: build>=1.2.2; extra == "dev"
@@ -66,7 +67,7 @@ package and command-line interface for data extraction, validation, transformati
  - [Load Data](#load-data)
  - [Python API](#python-api)
  - [Complete ETL Pipeline Example](#complete-etl-pipeline-example)
- - [Environment Variables](#environment-variables)
+ - [Format Overrides](#format-overrides)
  - [Transformation Operations](#transformation-operations)
  - [Filter Operations](#filter-operations)
  - [Aggregation Functions](#aggregation-functions)
@@ -78,6 +79,8 @@ package and command-line interface for data extraction, validation, transformati
  - [Test Layers](#test-layers)
  - [Code Coverage](#code-coverage)
  - [Linting](#linting)
+ - [Updating Demo Snippets](#updating-demo-snippets)
+ - [Releasing to PyPI](#releasing-to-pypi)
  - [Links](#links)
  - [License](#license)
  - [Contributing](#contributing)
@@ -168,9 +171,9 @@ etlplus --version

  #### Extract Data

- Note: For file sources, the format is inferred from the filename extension; the `--format` option is
- ignored. To treat passing `--format` as an error for file sources, either set
- `ETLPLUS_FORMAT_BEHAVIOR=error` or pass the CLI flag `--strict-format`.
+ Note: For file sources, the format is normally inferred from the filename extension. Use
+ `--source-format` to override inference when a file lacks an extension or when you want to force a
+ specific parser.

  Extract from JSON file:
  ```bash
@@ -211,6 +214,20 @@ etlplus validate examples/data/sample.json --rules '{"email": {"type": "string",

  #### Transform Data

+ When piping data through `etlplus transform`, use `--source-format` whenever the SOURCE argument is
+ `-` or a literal payload, mirroring the `etlplus extract` semantics. Use `--target-format` to
+ control the emitted format for stdout or other non-file outputs, just like `etlplus load`. File
+ paths continue to infer formats from their extensions. Use `--from` to override the inferred source
+ connector type and `--to` to override the inferred target connector type, matching the `etlplus
+ extract`/`etlplus load` behavior.
+
+ Transform file inputs while overriding connector types:
+ ```bash
+ etlplus transform --from file examples/data/sample.json \
+   --operations '{"select": ["name", "email"]}' \
+   --to file -o temp/selected_output.json
+ ```
+
  Filter and select fields:
  ```bash
  etlplus transform '[{"name": "John", "age": 30}, {"name": "Jane", "age": 25}]' \
@@ -234,19 +251,24 @@ etlplus transform examples/data/sample.json --operations '{"map": {"name": "new_

  #### Load Data

+ `etlplus load` consumes JSON from stdin; provide only the target argument plus optional flags.
+
  Load to JSON file:
  ```bash
- etlplus load '{"name": "John", "age": 30}' file temp/sample_output.json
+ etlplus extract file examples/data/sample.json \
+   | etlplus load --to file temp/sample_output.json
  ```

  Load to CSV file:
  ```bash
- etlplus load '[{"name": "John", "age": 30}]' file temp/sample_output.csv
+ etlplus extract file examples/data/sample.csv \
+   | etlplus load --to file temp/sample_output.csv
  ```

  Load to REST API:
  ```bash
- etlplus load examples/data/sample.json api https://api.example.com/endpoint
+ cat examples/data/sample.json \
+   | etlplus load --to api https://api.example.com/endpoint
  ```

  ### Python API
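
Not part of the README diff above: the pipe-oriented `load` examples can also be driven from Python with the standard library, which is a convenient way to smoke-test the 0.4.6 CLI. A minimal sketch, with the command strings copied from the README examples and the paths purely illustrative:

```python
"""Sketch: replicate `etlplus extract ... | etlplus load --to file ...` via subprocess."""

import subprocess

# Step 1: extract the sample file; the CLI writes JSON to stdout.
extracted = subprocess.run(
    ['etlplus', 'extract', 'file', 'examples/data/sample.json'],
    capture_output=True,
    text=True,
    check=True,
).stdout

# Step 2: feed the extracted JSON to `etlplus load`, which reads from stdin in 0.4.6.
subprocess.run(
    ['etlplus', 'load', '--to', 'file', 'temp/sample_output.json'],
    input=extracted,
    text=True,
    check=True,
)
```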
@@ -300,41 +322,28 @@ etlplus validate temp/sample_transformed.json \
    --rules '{"name": {"type": "string", "required": true}, "email": {"type": "string", "required": true}}'

  # 4. Load to CSV
- etlplus load temp/sample_transformed.json file temp/sample_output.csv
+ cat temp/sample_transformed.json \
+   | etlplus load --to file temp/sample_output.csv
  ```

- ### Environment Variables
-
- ETLPlus honors a small number of environment toggles to refine CLI behavior:
+ ### Format Overrides

- - `ETLPLUS_FORMAT_BEHAVIOR`: controls what happens when `--format` is provided for
-   file sources or targets (extract/load) where the format is inferred from the
-   filename extension.
-   - `error|fail|strict`: treat as error (non-zero exit)
-   - `warn` (default): print a warning to stderr
-   - `ignore|silent`: no message
- - Precedence: the CLI flag `--strict-format` overrides the environment.
+ `--source-format` and `--target-format` override whichever format would normally be inferred from a
+ file extension. This is useful when an input's extension is missing or misleading (for example,
+ `records.txt` that actually contains CSV) or when you intentionally want to treat a file as another format.

  Examples (zsh):

  ```zsh
- # Warn (default)
- etlplus extract file data.csv --format csv
- etlplus load data.json file out.csv --format csv
-
- # Enforce error via environment
- ETLPLUS_FORMAT_BEHAVIOR=error \
-   etlplus extract file data.csv --format csv
- ETLPLUS_FORMAT_BEHAVIOR=error \
-   etlplus load data.json file out.csv --format csv
-
- # Equivalent strict behavior via flag (overrides environment)
- etlplus extract file data.csv --format csv --strict-format
- etlplus load data.json file out.csv --format csv --strict-format
-
- # Recommended: rely on extension, no --format needed for files
- etlplus extract file data.csv
- etlplus load data.json file out.csv
+ # Force CSV parsing for an extension-less file
+ etlplus extract --from file data.txt --source-format csv
+
+ # Write CSV to a file without the .csv suffix
+ etlplus load --to file output.bin --target-format csv < data.json
+
+ # Leave the flags off when extensions already match the desired format
+ etlplus extract --from file data.csv
+ etlplus load --to file data.json < data.json
  ```

  ## Transformation Operations
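
The override behavior described above also surfaces in the legacy parser added in `etlplus/cli/main.py` earlier in this diff: passing `--source-format` or `--target-format` triggers `_FormatAction`, which flips a private `_format_explicit` marker that the handlers can presumably consult to distinguish an explicit override from the `json` default. A minimal sketch, assuming an installed `etlplus` 0.4.6 and using only names visible in the diff:

```python
"""Sketch: how the format-override flags surface in the legacy argparse parser."""

from etlplus.cli.main import create_parser

parser = create_parser()

# No explicit flag: the option keeps its 'json' default and the private marker
# stays False, so extension-based inference can still win.
implicit = parser.parse_args(['extract', 'file', 'data.csv'])
print(implicit.source_format, implicit._format_explicit)  # json False

# Explicit flag: _FormatAction records the value and flips the marker, which is
# how an override for an extension-less or misleading file can be detected.
explicit = parser.parse_args(['extract', 'file', 'data.txt', '--source-format', 'csv'])
print(explicit.source_format, explicit._format_explicit)  # csv True
```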
etlplus-0.3.25.dist-info/RECORD → etlplus-0.4.6.dist-info/RECORD
@@ -1,7 +1,6 @@
  etlplus/__init__.py,sha256=M2gScnyir6WOMAh_EuoQIiAzdcTls0_5hbd_Q6of8I0,1021
- etlplus/__main__.py,sha256=lSbVOF5Mnd_ljmCqK7nTuF_MRDYTkL73eZEFeUQ_vnI,510
+ etlplus/__main__.py,sha256=btoROneNiigyfBU7BSzPKZ1R9gzBMpxcpsbPwmuHwTM,479
  etlplus/__version__.py,sha256=1E0GMK_yUWCMQFKxXjTvyMwofi0qT2k4CDNiHWiymWE,327
- etlplus/cli.py,sha256=dmMW5dLbFiDRGne97qDsqr3YuU30g_Ekl_vo7bgDLig,21752
  etlplus/enums.py,sha256=V_j18Ud2BCXpFsBk2pZGrvCVrvAMJ7uja1z9fppFGso,10175
  etlplus/extract.py,sha256=f44JdHhNTACxgn44USx05paKTwq7LQY-V4wANCW9hVM,6173
  etlplus/file.py,sha256=RxIAsGDN4f_vNA2B5-ct88JNd_ISAyYbooIRE5DstS8,17972
@@ -31,6 +30,10 @@ etlplus/api/pagination/paginator.py,sha256=wtdY_er4yfjx5yTUQJ1gPq-IuWmpLAHeG5buB
  etlplus/api/rate_limiting/__init__.py,sha256=ZySB1dZettEDnWvI1EHf_TZ9L08M_kKsNR-Y_lbU6kI,1070
  etlplus/api/rate_limiting/config.py,sha256=2b4wIynblN-1EyMqI4aXa71SljzSjXYh5N1Nngr3jOg,9406
  etlplus/api/rate_limiting/rate_limiter.py,sha256=Uxozqd_Ej5Lsj-M-mLT2WexChgWh7x35_YP10yqYPQA,7159
+ etlplus/cli/__init__.py,sha256=J97-Rv931IL1_b4AXnB7Fbbd7HKnHBpx18NQfC_kE6c,299
+ etlplus/cli/app.py,sha256=pc9VDUb3Qc8u5-XyDrHJkrSR9D3bq4e9zLbaD8KzyfY,32618
+ etlplus/cli/handlers.py,sha256=aI_ZlnJCGGkVnVJJPhmPRCXc31MxtLaOeqqJoo3ci48,15816
+ etlplus/cli/main.py,sha256=9hoitdc9FisrXzwZniTglPWwKsODFAW-A-2QQV4NkBs,12565
  etlplus/config/__init__.py,sha256=VZWzOg7d2YR9NT6UwKTv44yf2FRUMjTHynkm1Dl5Qzo,1486
  etlplus/config/connector.py,sha256=0-TIwevHbKRHVmucvyGpPd-3tB1dKHB-dj0yJ6kq5eY,9809
  etlplus/config/jobs.py,sha256=hmzRCqt0OvCEZZR4ONKrd3lvSv0OmayjLc4yOBk3ug8,7399
@@ -40,9 +43,9 @@ etlplus/config/types.py,sha256=a0epJ3z16HQ5bY3Ctf8s_cQPa3f0HHcwdOcjCP2xoG4,4954
  etlplus/config/utils.py,sha256=4SUHMkt5bKBhMhiJm-DrnmE2Q4TfOgdNCKz8PJDS27o,3443
  etlplus/validation/__init__.py,sha256=Pe5Xg1_EA4uiNZGYu5WTF3j7odjmyxnAJ8rcioaplSQ,1254
  etlplus/validation/utils.py,sha256=Mtqg449VIke0ziy_wd2r6yrwJzQkA1iulZC87FzXMjo,10201
- etlplus-0.3.25.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
- etlplus-0.3.25.dist-info/METADATA,sha256=QehDLWQ0MN7sUykhLs9gQgOrT3ru_QgaEFftJCkgNgw,16730
- etlplus-0.3.25.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- etlplus-0.3.25.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
- etlplus-0.3.25.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
- etlplus-0.3.25.dist-info/RECORD,,
+ etlplus-0.4.6.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
+ etlplus-0.4.6.dist-info/METADATA,sha256=oq0-zYAVKNkZFCBPecObqjJ2LuYcbSX5kVjCErpiDO4,17278
+ etlplus-0.4.6.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+ etlplus-0.4.6.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
+ etlplus-0.4.6.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
+ etlplus-0.4.6.dist-info/RECORD,,
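
For readers cross-checking the RECORD hunk above: each entry stores a SHA-256 digest encoded as URL-safe base64 without padding, per the wheel RECORD format. A hedged sketch for verifying one of the new files against its entry (the path is wherever etlplus 0.4.6 is installed in your environment):

```python
"""Sketch: recompute a RECORD-style digest for an installed file."""

import base64
import hashlib
from pathlib import Path


def record_digest(path: Path) -> str:
    # RECORD stores sha256 digests as URL-safe base64 with the '=' padding stripped.
    digest = hashlib.sha256(path.read_bytes()).digest()
    return 'sha256=' + base64.urlsafe_b64encode(digest).rstrip(b'=').decode('ascii')


# Illustrative path; expected to match the RECORD entry for etlplus/cli/main.py:
# sha256=9hoitdc9FisrXzwZniTglPWwKsODFAW-A-2QQV4NkBs
print(record_digest(Path('etlplus/cli/main.py')))
```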