etlplus 0.10.5__py3-none-any.whl → 0.11.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- etlplus/cli/commands.py +1 -1
- etlplus/cli/constants.py +1 -1
- etlplus/cli/io.py +2 -2
- etlplus/config/pipeline.py +2 -2
- etlplus/enums.py +2 -270
- etlplus/extract.py +5 -7
- etlplus/file/__init__.py +27 -0
- etlplus/file/core.py +287 -0
- etlplus/file/csv.py +82 -0
- etlplus/file/enums.py +266 -0
- etlplus/file/json.py +87 -0
- etlplus/file/xml.py +165 -0
- etlplus/file/yaml.py +125 -0
- etlplus/load.py +9 -12
- etlplus/run.py +6 -9
- {etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/METADATA +1 -1
- {etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/RECORD +21 -15
- etlplus/file.py +0 -652
- {etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/WHEEL +0 -0
- {etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/entry_points.txt +0 -0
- {etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/licenses/LICENSE +0 -0
- {etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/top_level.txt +0 -0
etlplus/file/json.py
ADDED
@@ -0,0 +1,87 @@
+"""
+:mod:`etlplus.file.json` module.
+
+JSON read/write helpers.
+"""
+
+from __future__ import annotations
+
+import json
+from pathlib import Path
+from typing import cast
+
+from ..types import JSONData
+from ..types import JSONDict
+from ..types import JSONList
+from ..utils import count_records
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONData:
+    """
+    Load and validate JSON payloads from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the JSON file on disk.
+
+    Returns
+    -------
+    JSONData
+        The structured data read from the JSON file.
+
+    Raises
+    ------
+    TypeError
+        If the JSON root is not an object or an array of objects.
+    """
+    with path.open('r', encoding='utf-8') as handle:
+        loaded = json.load(handle)
+
+    if isinstance(loaded, dict):
+        return cast(JSONDict, loaded)
+    if isinstance(loaded, list):
+        if all(isinstance(item, dict) for item in loaded):
+            return cast(JSONList, loaded)
+        raise TypeError(
+            'JSON array must contain only objects (dicts) when loading file',
+        )
+    raise TypeError(
+        'JSON root must be an object or an array of objects when loading file',
+    )
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` as formatted JSON to ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the JSON file on disk.
+    data : JSONData
+        Data to serialize as JSON.
+
+    Returns
+    -------
+    int
+        The number of records written to the JSON file.
+    """
+    path.parent.mkdir(parents=True, exist_ok=True)
+    with path.open('w', encoding='utf-8') as handle:
+        json.dump(
+            data,
+            handle,
+            indent=2,
+            ensure_ascii=False,
+        )
+        handle.write('\n')
+
+    return count_records(data)
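As a quick orientation for reviewers, the following usage sketch (not part of the diff) exercises the two helpers exactly as declared above; the `tmp/users.json` path and the sample records are invented for illustration, and the returned count comes from `count_records`, whose exact behavior is defined elsewhere in the package.

from pathlib import Path

from etlplus.file import json as json_file  # the module added above

# Write a list of record dicts; write() returns count_records(data).
records = [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]
written = json_file.write(Path('tmp/users.json'), records)

# read() hands back the dict or list-of-dicts root; any other root raises TypeError.
loaded = json_file.read(Path('tmp/users.json'))
assert loaded == records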
etlplus/file/xml.py
ADDED
@@ -0,0 +1,165 @@
+"""
+:mod:`etlplus.file.xml` module.
+
+XML read/write helpers.
+"""
+
+from __future__ import annotations
+
+import xml.etree.ElementTree as ET
+from pathlib import Path
+from typing import Any
+
+from ..types import JSONData
+from ..types import JSONDict
+from ..utils import count_records
+
+# SECTION: CONSTANTS ======================================================== #
+
+
+DEFAULT_XML_ROOT = 'root'
+
+
+# SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+def _dict_to_element(
+    name: str,
+    payload: Any,
+) -> ET.Element:
+    """
+    Convert a dictionary-like payload into an XML element.
+
+    Parameters
+    ----------
+    name : str
+        Name of the XML element.
+    payload : Any
+        The data to include in the XML element.
+
+    Returns
+    -------
+    ET.Element
+        The constructed XML element.
+    """
+    element = ET.Element(name)
+
+    if isinstance(payload, dict):
+        text = payload.get('text')
+        if text is not None:
+            element.text = str(text)
+
+        for key, value in payload.items():
+            if key == 'text':
+                continue
+            if key.startswith('@'):
+                element.set(key[1:], str(value))
+                continue
+            if isinstance(value, list):
+                for item in value:
+                    element.append(_dict_to_element(key, item))
+            else:
+                element.append(_dict_to_element(key, value))
+    elif isinstance(payload, list):
+        for item in payload:
+            element.append(_dict_to_element('item', item))
+    elif payload is not None:
+        element.text = str(payload)
+
+    return element
+
+
+def _element_to_dict(
+    element: ET.Element,
+) -> JSONDict:
+    """
+    Convert an XML element into a nested dictionary.
+
+    Parameters
+    ----------
+    element : ET.Element
+        XML element to convert.
+
+    Returns
+    -------
+    JSONDict
+        Nested dictionary representation of the XML element.
+    """
+    result: JSONDict = {}
+    text = (element.text or '').strip()
+    if text:
+        result['text'] = text
+
+    for child in element:
+        child_data = _element_to_dict(child)
+        tag = child.tag
+        if tag in result:
+            existing = result[tag]
+            if isinstance(existing, list):
+                existing.append(child_data)
+            else:
+                result[tag] = [existing, child_data]
+        else:
+            result[tag] = child_data
+
+    for key, value in element.attrib.items():
+        if key in result:
+            result[f'@{key}'] = value
+        else:
+            result[key] = value
+    return result
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONDict:
+    """
+    Parse XML document at ``path`` into a nested dictionary.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the XML file on disk.
+
+    Returns
+    -------
+    JSONDict
+        Nested dictionary representation of the XML file.
+    """
+    tree = ET.parse(path)
+    root = tree.getroot()
+
+    return {root.tag: _element_to_dict(root)}
+
+
+def write(path: Path, data: JSONData, *, root_tag: str) -> int:
+    """
+    Write ``data`` as XML to ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the XML file on disk.
+    data : JSONData
+        Data to write as XML.
+    root_tag : str
+        Root tag name to use when writing XML files.
+
+    Returns
+    -------
+    int
+        The number of records written to the XML file.
+    """
+    if isinstance(data, dict) and len(data) == 1:
+        root_name, payload = next(iter(data.items()))
+        root_element = _dict_to_element(str(root_name), payload)
+    else:
+        root_element = _dict_to_element(root_tag, data)
+
+    tree = ET.ElementTree(root_element)
+    tree.write(path, encoding='utf-8', xml_declaration=True)
+
+    return count_records(data)
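A hedged sketch of how these XML helpers behave, inferred only from the code in this hunk: on write, ``@``-prefixed keys become attributes and a ``text`` key becomes element text; on read, repeated child tags are collapsed into lists. The file path and payload below are invented for illustration.

from pathlib import Path

from etlplus.file import xml as xml_file  # the module added above

payload = {
    'catalog': {
        'book': [
            {'@id': 'b1', 'title': {'text': 'Dune'}},
            {'@id': 'b2', 'title': {'text': 'Hyperion'}},
        ],
    },
}

# A single-key dict supplies its own root tag ('catalog'), so root_tag is unused here.
xml_file.write(Path('tmp/catalog.xml'), payload, root_tag='root')

# read() wraps the result as {root.tag: nested_dict}; both <book> children land in a list.
roundtrip = xml_file.read(Path('tmp/catalog.xml'))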
etlplus/file/yaml.py
ADDED
@@ -0,0 +1,125 @@
+"""
+:mod:`etlplus.file.yaml` module.
+
+Optional YAML read/write helpers.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+from typing import Any
+from typing import cast
+
+from ..types import JSONData
+from ..types import JSONDict
+from ..types import JSONList
+from ..utils import count_records
+
+# SECTION: INTERNAL CONSTANTS =============================================== #
+
+
+# Optional YAML support (lazy-loaded to avoid hard dependency)
+# Cached access function to avoid global statements.
+_YAML_CACHE: dict[str, Any] = {}
+
+
+# SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+def _get_yaml() -> Any:
+    """
+    Return the PyYAML module, importing it on first use.
+
+    Raises an informative ImportError if the optional dependency is missing.
+    """
+    mod = _YAML_CACHE.get('mod')
+    if mod is not None:  # pragma: no cover - tiny branch
+        return mod
+    try:
+        _yaml_mod = __import__('yaml')  # type: ignore[assignment]
+    except ImportError as e:  # pragma: no cover
+        raise ImportError(
+            'YAML support requires optional dependency "PyYAML".\n'
+            'Install with: pip install PyYAML',
+        ) from e
+    _YAML_CACHE['mod'] = _yaml_mod
+
+    return _yaml_mod
+
+
+def _require_yaml() -> None:
+    """Ensure PyYAML is available or raise an informative error."""
+    _get_yaml()
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONData:
+    """
+    Load and validate YAML payloads from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the YAML file on disk.
+
+    Returns
+    -------
+    JSONData
+        The structured data read from the YAML file.
+
+    Raises
+    ------
+    TypeError
+        If the YAML root is not an object or an array of objects.
+    """
+    _require_yaml()
+
+    with path.open('r', encoding='utf-8') as handle:
+        loaded = _get_yaml().safe_load(handle)
+
+    if isinstance(loaded, dict):
+        return cast(JSONDict, loaded)
+    if isinstance(loaded, list):
+        if all(isinstance(item, dict) for item in loaded):
+            return cast(JSONList, loaded)
+        raise TypeError(
+            'YAML array must contain only objects (dicts) when loading',
+        )
+    raise TypeError(
+        'YAML root must be an object or an array of objects when loading',
+    )
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` as YAML to ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the YAML file on disk.
+    data : JSONData
+        Data to write as YAML.
+
+    Returns
+    -------
+    int
+        The number of records written.
+    """
+    _require_yaml()
+    with path.open('w', encoding='utf-8') as handle:
+        _get_yaml().safe_dump(
+            data,
+            handle,
+            sort_keys=False,
+            allow_unicode=True,
+            default_flow_style=False,
+        )
+    return count_records(data)
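Because PyYAML is resolved lazily through ``_get_yaml``, importing ``etlplus.file.yaml`` stays cheap and the optional dependency is only required when the helpers are actually called. A small usage sketch under that assumption (the path and data are invented):

from pathlib import Path

from etlplus.file import yaml as yaml_file  # the module added above

try:
    yaml_file.write(Path('tmp/config.yaml'), {'env': 'dev', 'retries': 3})
    config = yaml_file.read(Path('tmp/config.yaml'))
except ImportError as exc:
    # Raised by _require_yaml()/_get_yaml() when PyYAML is not installed.
    print(f'YAML support unavailable: {exc}')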
etlplus/load.py
CHANGED
@@ -15,12 +15,9 @@ from typing import cast
 import requests  # type: ignore[import]

 from .enums import DataConnectorType
-from .enums import FileFormat
 from .enums import HttpMethod
-from .enums import coerce_data_connector_type
-from .enums import coerce_file_format
-from .enums import coerce_http_method
 from .file import File
+from .file import FileFormat
 from .types import JSONData
 from .types import JSONDict
 from .types import JSONList
@@ -101,7 +98,7 @@ def load_data(
         return cast(JSONData, source)

     if isinstance(source, Path):
-        return File(source, FileFormat.JSON).
+        return File(source, FileFormat.JSON).read()

     if isinstance(source, str):
         # Special case: '-' means read JSON from STDIN (Unix convention).
@@ -111,7 +108,7 @@
         candidate = Path(source)
         if candidate.exists():
             try:
-                return File(candidate, FileFormat.JSON).
+                return File(candidate, FileFormat.JSON).read()
             except (OSError, json.JSONDecodeError, ValueError):
                 # Fall back to treating the string as raw JSON content.
                 pass
@@ -155,9 +152,9 @@ def load_to_file(
     if file_format is None:
         records = File(path).write(data)
         ext = path.suffix.lstrip('.').lower()
-        fmt =
+        fmt = FileFormat.coerce(ext) if ext else FileFormat.JSON
     else:
-        fmt =
+        fmt = FileFormat.coerce(file_format)
         records = File(path, fmt).write(data)
     if fmt is FileFormat.CSV and records == 0:
         message = 'No data to write'
@@ -242,7 +239,7 @@ def load_to_api(
     TypeError
         If the session object is not valid.
     """
-    http_method =
+    http_method = HttpMethod.coerce(method)

     # Apply a conservative timeout to guard against hanging requests.
     timeout = kwargs.pop('timeout', 10.0)
@@ -316,7 +313,7 @@ def load(
     """
     data = load_data(source)

-    match
+    match DataConnectorType.coerce(target_type):
         case DataConnectorType.FILE:
             # Prefer explicit format if provided, else infer from filename.
             return load_to_file(data, target, file_format)
@@ -331,6 +328,6 @@ def load(
                 **kwargs,
             )
         case _:
-            # `
-            # explicit guard.
+            # :meth:`coerce` already raises for invalid connector types, but
+            # keep explicit guard for defensive programming.
             raise ValueError(f'Invalid target type: {target_type}')
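The swap from module-level ``coerce_*`` helpers to enum classmethods is the core of this change. A hedged sketch of the format-inference logic as it now reads in ``load_to_file`` (the ``infer_format`` wrapper is hypothetical, added only to make the snippet self-contained):

from pathlib import Path

from etlplus.file import FileFormat


def infer_format(path: Path) -> FileFormat:
    # Mirrors the load_to_file() branch above: coerce the file extension,
    # defaulting to JSON when the path has no suffix.
    ext = path.suffix.lstrip('.').lower()
    return FileFormat.coerce(ext) if ext else FileFormat.JSON


infer_format(Path('out/data.csv'))  # expected to yield FileFormat.CSV
infer_format(Path('out/data'))      # falls back to FileFormat.JSON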
etlplus/run.py
CHANGED
@@ -23,7 +23,6 @@ from .api import RetryPolicy
 from .api import Url
 from .config import load_pipeline_config
 from .enums import DataConnectorType
-from .enums import coerce_data_connector_type
 from .extract import extract
 from .load import load
 from .run_helpers import compose_api_request_env
@@ -185,8 +184,7 @@ def run(

     data: Any
     stype_raw = getattr(source_obj, 'type', None)
-
-    match stype:
+    match DataConnectorType.coerce(stype_raw or ''):
         case DataConnectorType.FILE:
             path = getattr(source_obj, 'path', None)
             fmt = ex_opts.get('format') or getattr(
@@ -261,8 +259,8 @@ def run(
                 sleep_seconds=cast(float, env.get('sleep_seconds', 0.0)),
             )
         case _:
-            #
-            #
+            # :meth:`coerce` already raises for invalid connector types, but
+            # keep explicit guard for defensive programming.
             raise ValueError(f'Unsupported source type: {stype_raw}')

     # DRY: unified validation helper (pre/post transform)
@@ -318,8 +316,7 @@ def run(
     overrides = job_obj.load.overrides or {}

     ttype_raw = getattr(target_obj, 'type', None)
-
-    match ttype:
+    match DataConnectorType.coerce(ttype_raw or ''):
         case DataConnectorType.FILE:
             path = overrides.get('path') or getattr(target_obj, 'path', None)
             fmt = overrides.get('format') or getattr(
@@ -357,8 +354,8 @@ def run(
             )
             result = load(data, 'database', str(conn))
         case _:
-            #
-            #
+            # :meth:`coerce` already raises for invalid connector types, but
+            # keep explicit guard for defensive programming.
             raise ValueError(f'Unsupported target type: {ttype_raw}')

     # Return the terminal load result directly; callers (e.g., CLI) can wrap
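Both dispatch sites in ``run`` now normalize the raw connector type through ``DataConnectorType.coerce`` before matching. A minimal illustration of that pattern, using only names visible in this diff (the ``describe`` helper is hypothetical):

from etlplus.enums import DataConnectorType


def describe(raw: str | None) -> str:
    # coerce() raises for unknown types, so the wildcard branch is only a
    # defensive guard, mirroring the comments added above.
    match DataConnectorType.coerce(raw or ''):
        case DataConnectorType.FILE:
            return 'reads from or writes to a local file'
        case _:
            raise ValueError(f'Unsupported source type: {raw}')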
{etlplus-0.10.5.dist-info → etlplus-0.11.2.dist-info}/RECORD
CHANGED
@@ -1,13 +1,12 @@
 etlplus/__init__.py,sha256=M2gScnyir6WOMAh_EuoQIiAzdcTls0_5hbd_Q6of8I0,1021
 etlplus/__main__.py,sha256=btoROneNiigyfBU7BSzPKZ1R9gzBMpxcpsbPwmuHwTM,479
 etlplus/__version__.py,sha256=1E0GMK_yUWCMQFKxXjTvyMwofi0qT2k4CDNiHWiymWE,327
-etlplus/enums.py,sha256=
-etlplus/extract.py,sha256=
-etlplus/
-etlplus/load.py,sha256=R_y0_vtsEo1bwxWVQu2bfhB5ZIJoIoWu2ycCdvY4RnE,8737
+etlplus/enums.py,sha256=WyxpUEUPdYdXlueKDXGaSEo7o9OqCXyzjDOOPqmW8tw,8326
+etlplus/extract.py,sha256=LOyL8_KCOaIGemTxSnKbN_ttfLWUljqT4OQxANe7G3k,6089
+etlplus/load.py,sha256=aufl-2CpuI_J1hKBY1uFsoVf9Gfl9bKQjs233dYFf00,8631
 etlplus/mixins.py,sha256=ifGpHwWv7U00yqGf-kN93vJax2IiK4jaGtTsPsO3Oak,1350
 etlplus/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-etlplus/run.py,sha256=
+etlplus/run.py,sha256=FjcMF56HUbSw2PAvB_dZWP-xTFP-Pa_QLYTsrjmFurw,12262
 etlplus/run_helpers.py,sha256=bj6MkaeFxjl3CeKG1HoXKx5DwAlXNERVW-GX-z1P_qQ,24373
 etlplus/transform.py,sha256=uAUVDDHYCgx7GpVez9IK3OAZM-CnCuMa9iox3vwGGJA,25296
 etlplus/types.py,sha256=1hsDlnF6r76zAwaUYay-i6pCM-Y0IU5nP7Crj8PLCQ4,6157
@@ -31,10 +30,10 @@ etlplus/api/rate_limiting/__init__.py,sha256=ZySB1dZettEDnWvI1EHf_TZ9L08M_kKsNR-
 etlplus/api/rate_limiting/config.py,sha256=2b4wIynblN-1EyMqI4aXa71SljzSjXYh5N1Nngr3jOg,9406
 etlplus/api/rate_limiting/rate_limiter.py,sha256=Uxozqd_Ej5Lsj-M-mLT2WexChgWh7x35_YP10yqYPQA,7159
 etlplus/cli/__init__.py,sha256=J97-Rv931IL1_b4AXnB7Fbbd7HKnHBpx18NQfC_kE6c,299
-etlplus/cli/commands.py,sha256=
-etlplus/cli/constants.py,sha256=
+etlplus/cli/commands.py,sha256=g8_m3A8HEMyTRu2HctNiRoi2gtB5oSZCUEcyq-PIXos,24669
+etlplus/cli/constants.py,sha256=E6Uy4WauLa_0zkzxqImXh-bb1gKdb9sBZQVc8QOzr2Q,1943
 etlplus/cli/handlers.py,sha256=K0GazvrPgocJ-63HZqF0xhyJk8TB1Gcj-eIbWltXKRU,17759
-etlplus/cli/io.py,sha256=
+etlplus/cli/io.py,sha256=EFaBPYaBOyOllfMQWXgTjy-MPiGfNejicpD7ROrPyAE,7840
 etlplus/cli/main.py,sha256=IgeqxypixfwLHR-QcpgVMQ7vMZ865bXOh2oO9v-BWeM,5234
 etlplus/cli/options.py,sha256=vfXT3YLh7wG1iC-aTdSg6ItMC8l6n0Lozmy53XjqLbA,1199
 etlplus/cli/state.py,sha256=Pfd8ru0wYIN7eGp1_A0tioqs1LiCDZCuJ6AnjZb6yYQ,8027
@@ -42,7 +41,7 @@ etlplus/cli/types.py,sha256=tclhKVJXDqHzlTQBYKARfqMgDOcuBJ-Zej2pvFy96WM,652
 etlplus/config/__init__.py,sha256=VZWzOg7d2YR9NT6UwKTv44yf2FRUMjTHynkm1Dl5Qzo,1486
 etlplus/config/connector.py,sha256=0-TIwevHbKRHVmucvyGpPd-3tB1dKHB-dj0yJ6kq5eY,9809
 etlplus/config/jobs.py,sha256=hmzRCqt0OvCEZZR4ONKrd3lvSv0OmayjLc4yOBk3ug8,7399
-etlplus/config/pipeline.py,sha256=
+etlplus/config/pipeline.py,sha256=m4Jh0ctFcKrIx6zR7LEC0sYY5wq0o8NqOruWPlz6qmA,9494
 etlplus/config/profile.py,sha256=Ss2zedQGjkaGSpvBLTD4SZaWViMJ7TJPLB8Q2_BTpPg,1898
 etlplus/config/types.py,sha256=a0epJ3z16HQ5bY3Ctf8s_cQPa3f0HHcwdOcjCP2xoG4,4954
 etlplus/config/utils.py,sha256=4SUHMkt5bKBhMhiJm-DrnmE2Q4TfOgdNCKz8PJDS27o,3443
@@ -52,14 +51,21 @@ etlplus/database/engine.py,sha256=7rr7YndA8LwyWJL8k1YhQbqxxmW4gWEUQjp0NwQcYtc,40
 etlplus/database/orm.py,sha256=gCSqH-CjQz6tV9133-VqgiwokK5ylun0BwXaIWfImAo,10008
 etlplus/database/schema.py,sha256=HNTgglI8qvQLInr7gq--2lLmLKHzAZTL2MJUOIw9DlY,7025
 etlplus/database/types.py,sha256=_pkQyC14TzAlgyeIqZG4F5LWYknZbHw3TW68Auk7Ya0,795
+etlplus/file/__init__.py,sha256=xd_Tvtzx7_PrGVb4Cjqp-v8p3P2qTPA3cZ14VzA1-0g,539
+etlplus/file/core.py,sha256=NXTGSIKIo7HvLDlMtme37_d4NUhsf4RUNKp5mTj-wqU,8131
+etlplus/file/csv.py,sha256=VbMW_NaqCw03HlfvYzb9MoAgCXI3cl9qc4dASkTHoyw,1880
+etlplus/file/enums.py,sha256=NjgXQ0f53Xa1eyGKHvYkk58udjQI5TQfLVaoawfdXY0,7520
+etlplus/file/json.py,sha256=xSV5PkZ_tZQuZNdLr1FQUwuCQXyL7Ch3WRJ3hkw0p68,1911
+etlplus/file/xml.py,sha256=vjate5u9Z26LPlpvZsdzpqXsIUZRgen7oHa3ly-aIhs,3905
+etlplus/file/yaml.py,sha256=6KaWoG7oYB26EHX2TZ7LOgigO11Hoq3MH--adFq_Eck,3004
 etlplus/templates/__init__.py,sha256=tsniN7XJYs3NwYxJ6c2HD5upHP3CDkLx-bQCMt97UOM,106
 etlplus/templates/ddl.sql.j2,sha256=s8fMWvcb4eaJVXkifuib1aQPljtZ8buuyB_uA-ZdU3Q,4734
 etlplus/templates/view.sql.j2,sha256=Iy8DHfhq5yyvrUKDxqp_aHIEXY4Tm6j4wT7YDEFWAhk,2180
 etlplus/validation/__init__.py,sha256=Pe5Xg1_EA4uiNZGYu5WTF3j7odjmyxnAJ8rcioaplSQ,1254
 etlplus/validation/utils.py,sha256=Mtqg449VIke0ziy_wd2r6yrwJzQkA1iulZC87FzXMjo,10201
-etlplus-0.
-etlplus-0.
-etlplus-0.
-etlplus-0.
-etlplus-0.
-etlplus-0.
+etlplus-0.11.2.dist-info/licenses/LICENSE,sha256=MuNO63i6kWmgnV2pbP2SLqP54mk1BGmu7CmbtxMmT-U,1069
+etlplus-0.11.2.dist-info/METADATA,sha256=3I2djs1HEPyMy31qvH5oSlolJRgXi6qKXcjcLdLhFig,21036
+etlplus-0.11.2.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+etlplus-0.11.2.dist-info/entry_points.txt,sha256=6w-2-jzuPa55spzK34h-UKh2JTEShh38adFRONNP9QE,45
+etlplus-0.11.2.dist-info/top_level.txt,sha256=aWWF-udn_sLGuHTM6W6MLh99ArS9ROkUWO8Mi8y1_2U,8
+etlplus-0.11.2.dist-info/RECORD,,