etlplus 0.7.0__py3-none-any.whl → 0.9.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
etlplus/database/orm.py CHANGED
@@ -13,9 +13,8 @@ Usage
 from __future__ import annotations
 
 import re
-from collections.abc import Callable
-from pathlib import Path
 from typing import Any
+from typing import Final
 
 from sqlalchemy import Boolean
 from sqlalchemy import CheckConstraint
@@ -41,11 +40,15 @@ from sqlalchemy.orm import DeclarativeBase
 from sqlalchemy.orm import mapped_column
 from sqlalchemy.types import TypeEngine
 
+from ..types import StrPath
 from .schema import ForeignKeySpec
 from .schema import TableSpec
 from .schema import load_table_specs
+from .types import ModelRegistry
+from .types import TypeFactory
+
+# SECTION: EXPORTS ========================================================== #
 
-# SECTION: INTERNAL CONSTANTS =============================================== #
 
 __all__ = [
     # Classes
@@ -57,7 +60,9 @@ __all__ = [
 ]
 
 
-_TYPE_MAPPING: dict[str, Callable[[list[int]], TypeEngine]] = {
+# SECTION: INTERNAL CONSTANTS =============================================== #
+
+_TYPE_MAPPING: Final[dict[str, TypeFactory]] = {
     'int': lambda _: Integer(),
     'integer': lambda _: Integer(),
     'bigint': lambda _: Integer(),
@@ -102,6 +107,8 @@ _TYPE_MAPPING: dict[str, Callable[[list[int]], TypeEngine]] = {
 class Base(DeclarativeBase):
     """Base class for all ORM models."""
 
+    __abstract__ = True
+
 
 # SECTION: INTERNAL FUNCTIONS =============================================== #
 
@@ -191,7 +198,7 @@ def build_models(
     specs: list[TableSpec],
     *,
     base: type[DeclarativeBase] = Base,
-) -> dict[str, type[DeclarativeBase]]:
+) -> ModelRegistry:
     """
     Build SQLAlchemy ORM models from table specifications.
     Parameters
@@ -202,10 +209,10 @@
         Base class for the ORM models (default: :class:`Base`).
     Returns
     -------
-    dict[str, type[DeclarativeBase]]
+    ModelRegistry
        Registry mapping fully qualified table names to ORM model classes.
    """
-    registry: dict[str, type[DeclarativeBase]] = {}
+    registry: ModelRegistry = {}
 
    for spec in specs:
        table_args: list[object] = []
@@ -302,23 +309,23 @@ def build_models(
 
 
 def load_and_build_models(
-    path: str | Path,
+    path: StrPath,
    *,
    base: type[DeclarativeBase] = Base,
-) -> dict[str, type[DeclarativeBase]]:
+) -> ModelRegistry:
    """
    Load table specifications from a file and build SQLAlchemy models.
 
    Parameters
    ----------
-    path : str | Path
+    path : StrPath
        Path to the YAML file containing table specifications.
    base : type[DeclarativeBase], optional
        Base class for the ORM models (default: :class:`Base`).
 
    Returns
    -------
-    dict[str, type[DeclarativeBase]]
+    ModelRegistry
        Registry mapping fully qualified table names to ORM model classes.
    """
    return build_models(load_table_specs(path), base=base)
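
The switch to `Final[dict[str, TypeFactory]]` pins the mapping's shape: every entry is a callable from the parsed type parameters to a SQLAlchemy `TypeEngine`. A minimal sketch of that pattern, assuming etlplus 0.9.0 is installed; the `varchar` and `numeric` factories below are illustrative stand-ins, not the module's actual entries:

```python
from typing import Final

from etlplus.database.types import TypeFactory
from sqlalchemy.types import Numeric, String, TypeEngine

# Illustrative factory table in the same shape as _TYPE_MAPPING: each entry
# turns the parameter list parsed from a spec type such as "varchar(255)"
# or "numeric(10, 2)" into a concrete TypeEngine instance.
demo_mapping: Final[dict[str, TypeFactory]] = {
    'varchar': lambda params: String(params[0]) if params else String(),
    'numeric': lambda params: Numeric(*params) if params else Numeric(),
}

def resolve(name: str, params: list[int]) -> TypeEngine:
    # A single lookup-and-call resolves any supported spec type.
    return demo_mapping[name.lower()](params)
```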
etlplus/database/schema.py CHANGED
@@ -16,6 +16,7 @@ from pydantic import ConfigDict
 from pydantic import Field
 
 from ..file import File
+from ..types import StrPath
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -244,14 +245,14 @@ class TableSpec(BaseModel):
 
 
 def load_table_specs(
-    path: str | Path,
+    path: StrPath,
 ) -> list[TableSpec]:
    """
    Load table specifications from a YAML file.
 
    Parameters
    ----------
-    path : str | Path
+    path : StrPath
        Path to the YAML file containing table specifications.
 
    Returns
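
The only change here is the `StrPath` alias, which (as in `orm.py`) is assumed to cover both `str` and `pathlib.Path` inputs. A quick sketch with a hypothetical spec file path:

```python
from pathlib import Path

from etlplus.database.schema import load_table_specs

# Both spellings are equivalent under the StrPath alias.
specs = load_table_specs('schemas/tables.yml')
specs = load_table_specs(Path('schemas') / 'tables.yml')
print(len(specs), 'table specs loaded')
```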
etlplus/database/types.py ADDED
@@ -0,0 +1,33 @@
+"""
+:mod:`etlplus.database.types` module.
+
+Shared type aliases leveraged across :mod:`etlplus.database` modules.
+"""
+
+from __future__ import annotations
+
+from collections.abc import Callable
+
+from sqlalchemy.orm import DeclarativeBase
+from sqlalchemy.types import TypeEngine
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    # Type Aliases
+    'ModelRegistry',
+    'TypeFactory',
+]
+
+
+# SECTION: TYPE ALIASES ===================================================== #
+
+
+# pylint: disable=invalid-name
+
+# Registry mapping fully qualified table names to declarative classes.
+type ModelRegistry = dict[str, type[DeclarativeBase]]
+
+# Callable producing a SQLAlchemy TypeEngine from parsed parameters.
+type TypeFactory = Callable[[list[int]], TypeEngine]
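
Together with the `orm.py` changes above, the new aliases make the build entry points read cleanly at call sites. A short sketch of the intended flow; the path and its YAML layout are hypothetical, and `load_table_specs` defines the real format:

```python
from etlplus.database.orm import load_and_build_models
from etlplus.database.types import ModelRegistry

# Build declarative models from a spec file; the registry maps fully
# qualified table names to the generated ORM classes.
models: ModelRegistry = load_and_build_models('schemas/tables.yml')
for qualified_name, model in models.items():
    print(qualified_name, '->', model.__name__)
```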
etlplus/load.py CHANGED
@@ -104,7 +104,7 @@ def load_data(
        return File(source, FileFormat.JSON).read_json()
 
    if isinstance(source, str):
-        # Special case: '-' means read JSON from stdin (Unix convention).
+        # Special case: '-' means read JSON from STDIN (Unix convention).
        if source == '-':
            raw = sys.stdin.read()
            return _parse_json_string(raw)
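
Only the comment's casing changes here; the `-` convention itself is untouched. For reference, a generic sketch of the pattern (not etlplus's actual implementation, which routes file reads through `File`/`FileFormat`):

```python
import json
import sys

def read_json_source(source: str) -> object:
    # Unix convention: '-' means the payload arrives on STDIN.
    if source == '-':
        return json.loads(sys.stdin.read())
    with open(source, encoding='utf-8') as fh:
        return json.load(fh)
```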
etlplus/types.py CHANGED
@@ -225,3 +225,8 @@ type Sleeper = Callable[[float], None]
 
 # Numeric timeout in seconds or ``None`` for no timeout.
 type Timeout = float | None
+
+# -- Templates -- #
+
+# Allowed template keys for bundled DDL rendering.
+type TemplateKey = Literal['ddl', 'view']
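
The two literal values line up with the bundled templates listed in RECORD (`etlplus/templates/ddl.sql.j2` and `view.sql.j2`). A sketch of how a PEP 695 `Literal` alias constrains callers; the `template_filename` helper is hypothetical, not part of the package:

```python
from typing import get_args

from etlplus.types import TemplateKey

def template_filename(key: TemplateKey) -> str:
    # Maps a validated key onto a bundled Jinja2 template's filename.
    return f'{key}.sql.j2'

# PEP 695 aliases expose their right-hand side via __value__.
print(get_args(TemplateKey.__value__))  # ('ddl', 'view')
```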
etlplus/utils.py CHANGED
@@ -6,7 +6,6 @@ Small shared helpers used across modules.
 
 from __future__ import annotations
 
-import argparse
 import json
 from collections.abc import Callable
 from collections.abc import Mapping
@@ -22,7 +21,6 @@ from .types import StrAnyMap
 __all__ = [
    # Data utilities
    'count_records',
-    'json_type',
    'print_json',
    # Mapping utilities
    'cast_str_dict',
@@ -119,35 +117,6 @@ def count_records(
    return len(data) if isinstance(data, list) else 1
 
 
-def json_type(
-    option: str,
-) -> Any:
-    """
-    Argparse ``type=`` hook that parses a JSON string.
-
-    Parameters
-    ----------
-    option : str
-        Raw CLI string to parse as JSON.
-
-    Returns
-    -------
-    Any
-        Parsed JSON value.
-
-    Raises
-    ------
-    argparse.ArgumentTypeError
-        If the input cannot be parsed as JSON.
-    """
-    try:
-        return json.loads(option)
-    except json.JSONDecodeError as e:  # pragma: no cover - argparse path
-        raise argparse.ArgumentTypeError(
-            f'Invalid JSON: {e.msg} (pos {e.pos})',
-        ) from e
-
-
 def maybe_mapping(
    value: Any,
 ) -> StrAnyMap | None:
@@ -181,7 +150,7 @@ def print_json(
    Returns
    -------
    None
-        This helper writes directly to ``stdout``.
+        This helper writes directly to STDOUT.
    """
    print(json.dumps(obj, indent=2, ensure_ascii=False))
 
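
`json_type` was argparse-specific, and the 0.9.0 CLI is Typer-based (see the METADATA changes below), so equivalent parsing now belongs in the CLI layer. A minimal sketch of one way to do that with Typer; the command and option here are hypothetical, not the actual `etlplus/cli` wiring:

```python
import json
from typing import Any

import typer

app = typer.Typer()

def parse_json(value: str) -> Any:
    # Same contract as the removed helper: parse, or fail with a CLI error.
    try:
        return json.loads(value)
    except json.JSONDecodeError as e:
        raise typer.BadParameter(f'Invalid JSON: {e.msg} (pos {e.pos})') from e

@app.command()
def transform(operations: str = typer.Option('{}')) -> None:
    ops = parse_json(operations)
    typer.echo(json.dumps(ops, indent=2))

if __name__ == '__main__':
    app()
```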
etlplus-0.7.0.dist-info/METADATA → etlplus-0.9.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: etlplus
-Version: 0.7.0
+Version: 0.9.0
 Summary: A Swiss Army knife for simple ETL operations
 Home-page: https://github.com/Dagitali/ETLPlus
 Author: ETLPlus Team
@@ -64,7 +64,8 @@ package and command-line interface for data extraction, validation, transformati
 - [Quickstart](#quickstart)
 - [Usage](#usage)
   - [Command Line Interface](#command-line-interface)
-    - [Inspect Pipelines](#inspect-pipelines)
+    - [Argument Order and Required Options](#argument-order-and-required-options)
+    - [Check Pipelines](#check-pipelines)
     - [Render SQL DDL](#render-sql-ddl)
     - [Extract Data](#extract-data)
     - [Validate Data](#validate-data)
@@ -151,8 +152,8 @@ etlplus --version
 
 # One-liner: extract CSV, filter, select, and write JSON
 etlplus extract file examples/data/sample.csv \
-  | etlplus transform - --operations '{"filter": {"field": "age", "op": "gt", "value": 25}, "select": ["name", "email"]}' \
-    -o temp/sample_output.json
+  | etlplus transform --operations '{"filter": {"field": "age", "op": "gt", "value": 25}, "select": ["name", "email"]}' \
+    - temp/sample_output.json
 ```
 
 [Python API](#python-api):
@@ -182,6 +183,30 @@ etlplus --help
 etlplus --version
 ```
 
+The CLI is implemented with Typer (Click-based). There is no argparse compatibility layer, so rely
+on the documented commands/flags and run `etlplus <command> --help` for current options.
+
+**Example error messages:**
+
+- If you omit a required argument: `Error: Missing required argument 'SOURCE'.`
+- If you place an option before its argument: `Error: Option '--source-format' must follow the 'SOURCE' argument.`
+
+#### Argument Order and Required Options
+
+For each command, positional arguments must precede the options tied to them. Required options must
+follow their associated argument:
+
+- **extract**: `etlplus extract SOURCE [--source-format ...] [--source-type ...]`
+  - `SOURCE` is required. `--source-format` and `--source-type` must follow `SOURCE`.
+- **transform**: `etlplus transform [--operations ...] SOURCE [--source-format ...] [--source-type ...] TARGET [--target-format ...] [--target-type ...]`
+  - `SOURCE` and `TARGET` are required. Format/type options must follow their respective argument (`--operations` itself may precede `SOURCE`, as the usage line shows).
+- **load**: `etlplus load TARGET [--target-format ...] [--target-type ...] [--source-format ...]`
+  - `TARGET` is required. `--target-format` and `--target-type` must follow `TARGET`.
+- **validate**: `etlplus validate SOURCE [--rules ...] [--source-format ...] [--source-type ...]`
+  - `SOURCE` is required. `--rules` and format/type options must follow `SOURCE`.
+
+If required arguments or options are missing, or if an option is placed before its associated argument, the CLI displays a clear error message.
+
 
 #### Check Pipelines
 Use `etlplus check` to explore pipeline YAML definitions without running them. The command can print
@@ -248,7 +273,7 @@ etlplus extract api https://api.example.com/data
 
 Save extracted data to file:
 ```bash
-etlplus extract file examples/data/sample.csv -o temp/sample_output.json
+etlplus extract file examples/data/sample.csv > temp/sample_output.json
 ```
 
 #### Validate Data
@@ -267,59 +292,67 @@ etlplus validate examples/data/sample.json --rules '{"email": {"type": "string",
 
 When piping data through `etlplus transform`, use `--source-format` whenever the SOURCE argument is
 `-` or a literal payload, mirroring the `etlplus extract` semantics. Use `--target-format` to
-control the emitted format for stdout or other non-file outputs, just like `etlplus load`. File
-paths continue to infer formats from their extensions. Use `--from` to override the inferred source
-connector type and `--to` to override the inferred target connector type, matching the `etlplus
-extract`/`etlplus load` behavior.
+control the emitted format for STDOUT or other non-file outputs, just like `etlplus load`. File
+paths continue to infer formats from their extensions. Use `--source-type` to override the inferred
+source connector type and `--target-type` to override the inferred target connector type, matching
+the `etlplus extract`/`etlplus load` behavior.
 
 Transform file inputs while overriding connector types:
 ```bash
-etlplus transform --from file examples/data/sample.json \
+etlplus transform \
   --operations '{"select": ["name", "email"]}' \
-  --to file -o temp/selected_output.json
+  examples/data/sample.json --source-type file \
+  temp/selected_output.json --target-type file
 ```
 
 Filter and select fields:
 ```bash
-etlplus transform '[{"name": "John", "age": 30}, {"name": "Jane", "age": 25}]' \
-  --operations '{"filter": {"field": "age", "op": "gt", "value": 26}, "select": ["name"]}'
+etlplus transform \
+  --operations '{"filter": {"field": "age", "op": "gt", "value": 26}, "select": ["name"]}' \
+  '[{"name": "John", "age": 30}, {"name": "Jane", "age": 25}]'
 ```
 
 Sort data:
 ```bash
-etlplus transform examples/data/sample.json --operations '{"sort": {"field": "age", "reverse": true}}'
+etlplus transform \
+  --operations '{"sort": {"field": "age", "reverse": true}}' \
+  examples/data/sample.json
 ```
 
 Aggregate data:
 ```bash
-etlplus transform examples/data/sample.json --operations '{"aggregate": {"field": "age", "func": "sum"}}'
+etlplus transform \
+  --operations '{"aggregate": {"field": "age", "func": "sum"}}' \
+  examples/data/sample.json
 ```
 
 Map/rename fields:
 ```bash
-etlplus transform examples/data/sample.json --operations '{"map": {"name": "new_name"}}'
+etlplus transform \
+  --operations '{"map": {"name": "new_name"}}' \
+  examples/data/sample.json
 ```
 
 #### Load Data
 
-`etlplus load` consumes JSON from stdin; provide only the target argument plus optional flags.
+`etlplus load` consumes JSON from STDIN; provide only the target argument plus optional flags.
 
 Load to JSON file:
 ```bash
 etlplus extract file examples/data/sample.json \
-  | etlplus load --to file temp/sample_output.json
+  | etlplus load temp/sample_output.json --target-type file
 ```
 
 Load to CSV file:
 ```bash
 etlplus extract file examples/data/sample.csv \
-  | etlplus load --to file temp/sample_output.csv
+  | etlplus load temp/sample_output.csv --target-type file
 ```
 
 Load to REST API:
 ```bash
 cat examples/data/sample.json \
-  | etlplus load --to api https://api.example.com/endpoint
+  | etlplus load https://api.example.com/endpoint --target-type api
 ```
 
 ### Python API
@@ -366,28 +399,28 @@ etlplus check --config examples/configs/pipeline.yml --summary
 
 # Run a job
 etlplus run --config examples/configs/pipeline.yml --job file_to_file_customers
-
-# Deprecated shim (will be removed): etlplus pipeline
 ```
 
 ### Complete ETL Pipeline Example
 
 ```bash
 # 1. Extract from CSV
-etlplus extract file examples/data/sample.csv -o temp/sample_extracted.json
+etlplus extract file examples/data/sample.csv > temp/sample_extracted.json
 
 # 2. Transform (filter and select fields)
-etlplus transform temp/sample_extracted.json \
+etlplus transform \
   --operations '{"filter": {"field": "age", "op": "gt", "value": 25}, "select": ["name", "email"]}' \
-  -o temp/sample_transformed.json
+  temp/sample_extracted.json \
+  temp/sample_transformed.json
 
 # 3. Validate transformed data
-etlplus validate temp/sample_transformed.json \
-  --rules '{"name": {"type": "string", "required": true}, "email": {"type": "string", "required": true}}'
+etlplus validate \
+  --rules '{"name": {"type": "string", "required": true}, "email": {"type": "string", "required": true}}' \
+  temp/sample_transformed.json
 
 # 4. Load to CSV
 cat temp/sample_transformed.json \
-  | etlplus load --to temp/sample_output.csv
+  | etlplus load temp/sample_output.csv
 ```
 
 ### Format Overrides
@@ -400,14 +433,14 @@ Examples (zsh):
 
 ```zsh
 # Force CSV parsing for an extension-less file
-etlplus extract --from file data.txt --source-format csv
+etlplus extract data.txt --source-type file --source-format csv
 
 # Write CSV to a file without the .csv suffix
-etlplus load --to file output.bin --target-format csv < data.json
+etlplus load output.bin --target-type file --target-format csv < data.json
 
 # Leave the flags off when extensions already match the desired format
-etlplus extract --from file data.csv
-etlplus load --to file data.json < data.json
+etlplus extract data.csv --source-type file
+etlplus load data.json --target-type file < data.json
 ```
 
 ## Transformation Operations
etlplus-0.7.0.dist-info/RECORD → etlplus-0.9.0.dist-info/RECORD RENAMED
@@ -4,16 +4,16 @@ etlplus/__version__.py,sha256=1E0GMK_yUWCMQFKxXjTvyMwofi0qT2k4CDNiHWiymWE,327
 etlplus/enums.py,sha256=V_j18Ud2BCXpFsBk2pZGrvCVrvAMJ7uja1z9fppFGso,10175
 etlplus/extract.py,sha256=f44JdHhNTACxgn44USx05paKTwq7LQY-V4wANCW9hVM,6173
 etlplus/file.py,sha256=RxIAsGDN4f_vNA2B5-ct88JNd_ISAyYbooIRE5DstS8,17972
-etlplus/load.py,sha256=BwF3gT4gIr-5CvNMz_aLTCl-w2ihWSTxNVd4X92XFwI,8737
+etlplus/load.py,sha256=R_y0_vtsEo1bwxWVQu2bfhB5ZIJoIoWu2ycCdvY4RnE,8737
 etlplus/mixins.py,sha256=ifGpHwWv7U00yqGf-kN93vJax2IiK4jaGtTsPsO3Oak,1350
 etlplus/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 etlplus/run.py,sha256=X4kp5FQlIWVf1_d9oSrchKau7BFDCE1Zkscvu7WPaWw,12340
 etlplus/run_helpers.py,sha256=bj6MkaeFxjl3CeKG1HoXKx5DwAlXNERVW-GX-z1P_qQ,24373
 etlplus/transform.py,sha256=uAUVDDHYCgx7GpVez9IK3OAZM-CnCuMa9iox3vwGGJA,25296
-etlplus/types.py,sha256=SJiZ7wJiSnV4CEvF-9E5nSFLBo4DT9OqHQqj1GSHkv8,6042
-etlplus/utils.py,sha256=_fn8b-SAdxiw28VX-Ugr8sZUPZI9mEkWKAGExlgxhJA,13993
+etlplus/types.py,sha256=1hsDlnF6r76zAwaUYay-i6pCM-Y0IU5nP7Crj8PLCQ4,6157
+etlplus/utils.py,sha256=BMLTWAvCJj3zLEcffBgURYnu0UGhhXsfH2WWpAt7fV8,13363
 etlplus/validate.py,sha256=7rJoEI_SIILdPpoBqqh2UJqg9oeReDz34mYSlc3t7Qg,12989
-etlplus/api/README.md,sha256=UkK5PiZWXbbnMNP0MaPa56S88PjSqOwhMNCyswOhvKc,7329
+etlplus/api/README.md,sha256=ZiyjxLz0LfFCzeYKXwtH8yY1OJ4hXCju7t2ICroFoU8,7215
 etlplus/api/__init__.py,sha256=P2JUYFy6Ep4t6xnsBiCBfQCkQLHYYhA-yXPXCobS8Y0,4295
 etlplus/api/auth.py,sha256=GOO5on-LoMS1GXTAhtK9rFcfpjbBcNeA6NE5UZwIq0g,12158
 etlplus/api/config.py,sha256=wRpOaZ31sPReVzEMme0jKl_37nqgraESwuYSNxP_xDo,17397
@@ -31,9 +31,14 @@ etlplus/api/rate_limiting/__init__.py,sha256=ZySB1dZettEDnWvI1EHf_TZ9L08M_kKsNR-
 etlplus/api/rate_limiting/config.py,sha256=2b4wIynblN-1EyMqI4aXa71SljzSjXYh5N1Nngr3jOg,9406
 etlplus/api/rate_limiting/rate_limiter.py,sha256=Uxozqd_Ej5Lsj-M-mLT2WexChgWh7x35_YP10yqYPQA,7159
 etlplus/cli/__init__.py,sha256=J97-Rv931IL1_b4AXnB7Fbbd7HKnHBpx18NQfC_kE6c,299
-etlplus/cli/app.py,sha256=SYPO-NDwXgymJrACw39jZ_NJrSKAs0O8anuWR5o42WM,35893
-etlplus/cli/handlers.py,sha256=nFMvqHQhJ8kJZPisDCiUHeOhjlqAO6hJvRjXiJTcU74,18951
-etlplus/cli/main.py,sha256=ijYOy72SEsxrTEBan5yADW8CZyr0yddVF8HeMgFw6Zg,16576
+etlplus/cli/commands.py,sha256=_nias9eSMZoTBiicXDNEkWLYfzd4-CcO2j_xPPxghls,24632
+etlplus/cli/constants.py,sha256=KIZj7J2tNf5mJbkqAdZmu5FXYW2FQmxwgeOKWc3-3Hg,1944
+etlplus/cli/handlers.py,sha256=K0GazvrPgocJ-63HZqF0xhyJk8TB1Gcj-eIbWltXKRU,17759
+etlplus/cli/io.py,sha256=7sldiZz4-Geomge5IO_XYykXPa6UiORfUWzLCdQePG8,7846
+etlplus/cli/main.py,sha256=IgeqxypixfwLHR-QcpgVMQ7vMZ865bXOh2oO9v-BWeM,5234
+etlplus/cli/options.py,sha256=vfXT3YLh7wG1iC-aTdSg6ItMC8l6n0Lozmy53XjqLbA,1199
+etlplus/cli/state.py,sha256=Pfd8ru0wYIN7eGp1_A0tioqs1LiCDZCuJ6AnjZb6yYQ,8027
+etlplus/cli/types.py,sha256=tclhKVJXDqHzlTQBYKARfqMgDOcuBJ-Zej2pvFy96WM,652
 etlplus/config/__init__.py,sha256=VZWzOg7d2YR9NT6UwKTv44yf2FRUMjTHynkm1Dl5Qzo,1486
 etlplus/config/connector.py,sha256=0-TIwevHbKRHVmucvyGpPd-3tB1dKHB-dj0yJ6kq5eY,9809
 etlplus/config/jobs.py,sha256=hmzRCqt0OvCEZZR4ONKrd3lvSv0OmayjLc4yOBk3ug8,7399
@@ -41,19 +46,20 @@ etlplus/config/pipeline.py,sha256=Va4MQY6KEyKqHGMKPmh09ZcGpx95br-iNUjpkqtzVbw,95
 etlplus/config/profile.py,sha256=Ss2zedQGjkaGSpvBLTD4SZaWViMJ7TJPLB8Q2_BTpPg,1898
 etlplus/config/types.py,sha256=a0epJ3z16HQ5bY3Ctf8s_cQPa3f0HHcwdOcjCP2xoG4,4954
 etlplus/config/utils.py,sha256=4SUHMkt5bKBhMhiJm-DrnmE2Q4TfOgdNCKz8PJDS27o,3443
-etlplus/database/__init__.py,sha256=0gWnMlQiVHS6SVUxIT9zklQUHU36y-2RF_gN1cx7icg,1018
-etlplus/database/ddl.py,sha256=lIar9KIOoBRslp_P0DnpoMDXzkjt64J5-iVV7CeSV_M,7747
-etlplus/database/engine.py,sha256=54f-XtNKIuogJhsLV9cX_xPoBwcl_HNJTL5HqMCi8kw,3986
-etlplus/database/orm.py,sha256=StjeguokM70oNKq7mNXLyc4_mYUZR-EKW3oGRlsd8QE,9962
-etlplus/database/schema.py,sha256=BmRP2wwX2xex1phLm0tnHrP6A2AQgguA-hSLnK0xwwc,7003
+etlplus/database/__init__.py,sha256=AKJsDl2RHuRGPS-eXgNJeh4aSncJP5Y0yLApBF6i7i8,1052
+etlplus/database/ddl.py,sha256=z9KvHi1MPhPBLHxMDdqJgLTp3A2-lcz0gqhZ7HIE6kU,7916
+etlplus/database/engine.py,sha256=7rr7YndA8LwyWJL8k1YhQbqxxmW4gWEUQjp0NwQcYtc,4061
+etlplus/database/orm.py,sha256=gCSqH-CjQz6tV9133-VqgiwokK5ylun0BwXaIWfImAo,10008
+etlplus/database/schema.py,sha256=HNTgglI8qvQLInr7gq--2lLmLKHzAZTL2MJUOIw9DlY,7025
+etlplus/database/types.py,sha256=_pkQyC14TzAlgyeIqZG4F5LWYknZbHw3TW68Auk7Ya0,795
 etlplus/templates/__init__.py,sha256=tsniN7XJYs3NwYxJ6c2HD5upHP3CDkLx-bQCMt97UOM,106
 etlplus/templates/ddl.sql.j2,sha256=s8fMWvcb4eaJVXkifuib1aQPljtZ8buuyB_uA-ZdU3Q,4734
 etlplus/templates/view.sql.j2,sha256=Iy8DHfhq5yyvrUKDxqp_aHIEXY4Tm6j4wT7YDEFWAhk,2180
 etlplus/validation/__init__.py,sha256=Pe5Xg1_EA4uiNZGYu5WTF3j7odjmyxnAJ8rcioaplSQ,1254
 etlplus/validation/utils.py,sha256=Mtqg449VIke0ziy_wd2r6yrwJzQkA1iulZC87FzXMjo,10201
-etlplus-0.7.0.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
-etlplus-0.7.0.dist-info/METADATA,sha256=ulMPDyXMX6p-NcxMBSZfegGrv0LNwAu_686_TVrkJPM,19383
-etlplus-0.7.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-etlplus-0.7.0.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
-etlplus-0.7.0.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
-etlplus-0.7.0.dist-info/RECORD,,
+etlplus-0.9.0.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
+etlplus-0.9.0.dist-info/METADATA,sha256=ynMgjG7Wv_xkP0fBaAOj3-rpUgJHwZ7UOzCxoU8CBeE,21035
+etlplus-0.9.0.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+etlplus-0.9.0.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
+etlplus-0.9.0.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
+etlplus-0.9.0.dist-info/RECORD,,