etlplus 0.10.4__py3-none-any.whl → 0.12.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (45)
  1. etlplus/README.md +37 -0
  2. etlplus/api/README.md +20 -3
  3. etlplus/cli/README.md +40 -0
  4. etlplus/cli/commands.py +1 -1
  5. etlplus/cli/constants.py +1 -1
  6. etlplus/cli/handlers.py +1 -1
  7. etlplus/cli/io.py +2 -2
  8. etlplus/config/README.md +52 -0
  9. etlplus/config/pipeline.py +2 -2
  10. etlplus/database/README.md +48 -0
  11. etlplus/database/ddl.py +1 -1
  12. etlplus/database/engine.py +1 -1
  13. etlplus/database/schema.py +1 -1
  14. etlplus/enums.py +2 -270
  15. etlplus/extract.py +5 -7
  16. etlplus/file/README.md +105 -0
  17. etlplus/file/__init__.py +25 -0
  18. etlplus/file/avro.py +198 -0
  19. etlplus/file/core.py +287 -0
  20. etlplus/file/csv.py +91 -0
  21. etlplus/file/enums.py +238 -0
  22. etlplus/file/feather.py +144 -0
  23. etlplus/file/gz.py +123 -0
  24. etlplus/file/json.py +98 -0
  25. etlplus/file/ndjson.py +109 -0
  26. etlplus/file/orc.py +142 -0
  27. etlplus/file/parquet.py +146 -0
  28. etlplus/file/tsv.py +91 -0
  29. etlplus/file/txt.py +99 -0
  30. etlplus/file/xls.py +132 -0
  31. etlplus/file/xlsx.py +142 -0
  32. etlplus/file/xml.py +174 -0
  33. etlplus/file/yaml.py +136 -0
  34. etlplus/file/zip.py +175 -0
  35. etlplus/load.py +9 -12
  36. etlplus/run.py +6 -9
  37. etlplus/templates/README.md +46 -0
  38. etlplus/validation/README.md +50 -0
  39. {etlplus-0.10.4.dist-info → etlplus-0.12.2.dist-info}/METADATA +58 -14
  40. {etlplus-0.10.4.dist-info → etlplus-0.12.2.dist-info}/RECORD +44 -20
  41. etlplus/file.py +0 -652
  42. {etlplus-0.10.4.dist-info → etlplus-0.12.2.dist-info}/WHEEL +0 -0
  43. {etlplus-0.10.4.dist-info → etlplus-0.12.2.dist-info}/entry_points.txt +0 -0
  44. {etlplus-0.10.4.dist-info → etlplus-0.12.2.dist-info}/licenses/LICENSE +0 -0
  45. {etlplus-0.10.4.dist-info → etlplus-0.12.2.dist-info}/top_level.txt +0 -0
etlplus/file/xlsx.py ADDED
@@ -0,0 +1,142 @@
1
+ """
2
+ :mod:`etlplus.file.xlsx` module.
3
+
4
+ Helpers for reading/writing Excel XLSX files.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from pathlib import Path
10
+ from typing import Any
11
+ from typing import cast
12
+
13
+ from ..types import JSONData
14
+ from ..types import JSONDict
15
+ from ..types import JSONList
16
+
17
+ # SECTION: EXPORTS ========================================================== #
18
+
19
+
20
+ __all__ = [
21
+ 'read',
22
+ 'write',
23
+ ]
24
+
25
+
26
+ # SECTION: INTERNAL CONSTANTS =============================================== #
27
+
28
+
29
+ _PANDAS_CACHE: dict[str, Any] = {}
30
+
31
+
32
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
33
+
34
+
35
+ def _get_pandas() -> Any:
36
+ """
37
+ Return the pandas module, importing it on first use.
38
+
39
+ Raises an informative ImportError if the optional dependency is missing.
40
+ """
41
+ mod = _PANDAS_CACHE.get('mod')
42
+ if mod is not None: # pragma: no cover - tiny branch
43
+ return mod
44
+ try:
45
+ _pd = __import__('pandas') # type: ignore[assignment]
46
+ except ImportError as e: # pragma: no cover
47
+ raise ImportError(
48
+ 'XLSX support requires optional dependency "pandas".\n'
49
+ 'Install with: pip install pandas',
50
+ ) from e
51
+ _PANDAS_CACHE['mod'] = _pd
52
+
53
+ return _pd
54
+
55
+
56
def _normalize_records(data: JSONData) -> JSONList:
    """
    Coerce a JSON payload into a list of dictionaries.

    A single object is wrapped in a one-element list; a list payload is
    validated to contain dictionaries only.

    Raises
    ------
    TypeError
        If a list payload contains any non-dict item.
    """
    if not isinstance(data, list):
        return [cast(JSONDict, data)]
    if any(not isinstance(entry, dict) for entry in data):
        raise TypeError('XLSX payloads must contain only objects (dicts)')
    return cast(JSONList, data)
67
+
68
+
69
+ # SECTION: FUNCTIONS ======================================================== #
70
+
71
+
72
def read(
    path: Path,
) -> JSONList:
    """
    Read XLSX content from ``path``.

    The first worksheet is loaded via pandas and converted to a list of
    row dictionaries.

    Parameters
    ----------
    path : Path
        Path to the XLSX file on disk.

    Returns
    -------
    JSONList
        The list of dictionaries read from the XLSX file.

    Raises
    ------
    ImportError
        If optional dependencies for XLSX support are missing.
    """
    pd = _get_pandas()
    try:
        frame = pd.read_excel(path)
    except ImportError as e:  # pragma: no cover
        # pandas defers the engine import; surface a clearer hint.
        raise ImportError(
            'XLSX support requires optional dependency "openpyxl".\n'
            'Install with: pip install openpyxl',
        ) from e
    records = frame.to_dict(orient='records')
    return cast(JSONList, records)
102
+
103
+
104
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to XLSX at ``path`` and return record count.

    Empty payloads are a no-op: nothing is written and no directories are
    created.

    Parameters
    ----------
    path : Path
        Path to the XLSX file on disk.
    data : JSONData
        Data to write.

    Returns
    -------
    int
        Number of records written.

    Raises
    ------
    ImportError
        If optional dependencies for XLSX support are missing.
    """
    rows = _normalize_records(data)
    if not rows:
        return 0

    pd = _get_pandas()
    path.parent.mkdir(parents=True, exist_ok=True)
    frame = pd.DataFrame.from_records(rows)
    try:
        frame.to_excel(path, index=False)
    except ImportError as e:  # pragma: no cover
        # The Excel writer engine is imported lazily by pandas.
        raise ImportError(
            'XLSX support requires optional dependency "openpyxl".\n'
            'Install with: pip install openpyxl',
        ) from e
    return len(rows)
etlplus/file/xml.py ADDED
@@ -0,0 +1,174 @@
1
+ """
2
+ :mod:`etlplus.file.xml` module.
3
+
4
+ Helpers for reading/writing XML files.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import xml.etree.ElementTree as ET
10
+ from pathlib import Path
11
+ from typing import Any
12
+
13
+ from ..types import JSONData
14
+ from ..types import JSONDict
15
+ from ..utils import count_records
16
+
17
+ # SECTION: EXPORTS ========================================================== #
18
+
19
+
20
+ __all__ = [
21
+ 'read',
22
+ 'write',
23
+ ]
24
+
25
+
26
+ # SECTION: CONSTANTS ======================================================== #
27
+
28
+
29
+ DEFAULT_XML_ROOT = 'root'
30
+
31
+
32
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
33
+
34
+
35
+ def _dict_to_element(
36
+ name: str,
37
+ payload: Any,
38
+ ) -> ET.Element:
39
+ """
40
+ Convert a dictionary-like payload into an XML element.
41
+
42
+ Parameters
43
+ ----------
44
+ name : str
45
+ Name of the XML element.
46
+ payload : Any
47
+ The data to include in the XML element.
48
+
49
+ Returns
50
+ -------
51
+ ET.Element
52
+ The constructed XML element.
53
+ """
54
+ element = ET.Element(name)
55
+
56
+ if isinstance(payload, dict):
57
+ text = payload.get('text')
58
+ if text is not None:
59
+ element.text = str(text)
60
+
61
+ for key, value in payload.items():
62
+ if key == 'text':
63
+ continue
64
+ if key.startswith('@'):
65
+ element.set(key[1:], str(value))
66
+ continue
67
+ if isinstance(value, list):
68
+ for item in value:
69
+ element.append(_dict_to_element(key, item))
70
+ else:
71
+ element.append(_dict_to_element(key, value))
72
+ elif isinstance(payload, list):
73
+ for item in payload:
74
+ element.append(_dict_to_element('item', item))
75
+ elif payload is not None:
76
+ element.text = str(payload)
77
+
78
+ return element
79
+
80
+
81
+ def _element_to_dict(
82
+ element: ET.Element,
83
+ ) -> JSONDict:
84
+ """
85
+ Convert an XML element into a nested dictionary.
86
+
87
+ Parameters
88
+ ----------
89
+ element : ET.Element
90
+ XML element to convert.
91
+
92
+ Returns
93
+ -------
94
+ JSONDict
95
+ Nested dictionary representation of the XML element.
96
+ """
97
+ result: JSONDict = {}
98
+ text = (element.text or '').strip()
99
+ if text:
100
+ result['text'] = text
101
+
102
+ for child in element:
103
+ child_data = _element_to_dict(child)
104
+ tag = child.tag
105
+ if tag in result:
106
+ existing = result[tag]
107
+ if isinstance(existing, list):
108
+ existing.append(child_data)
109
+ else:
110
+ result[tag] = [existing, child_data]
111
+ else:
112
+ result[tag] = child_data
113
+
114
+ for key, value in element.attrib.items():
115
+ if key in result:
116
+ result[f'@{key}'] = value
117
+ else:
118
+ result[key] = value
119
+ return result
120
+
121
+
122
+ # SECTION: FUNCTIONS ======================================================== #
123
+
124
+
125
def read(
    path: Path,
) -> JSONDict:
    """
    Read XML content from ``path``.

    The result is keyed by the document's root tag, with the element tree
    converted to nested dictionaries.

    Parameters
    ----------
    path : Path
        Path to the XML file on disk.

    Returns
    -------
    JSONDict
        Nested dictionary representation of the XML file.
    """
    document = ET.parse(path)
    top = document.getroot()

    return {top.tag: _element_to_dict(top)}
145
+
146
+
147
def write(
    path: Path,
    data: JSONData,
    *,
    root_tag: str | None = None,
) -> int:
    """
    Write ``data`` to XML at ``path`` and return record count.

    Parameters
    ----------
    path : Path
        Path to the XML file on disk.
    data : JSONData
        Data to write as XML.
    root_tag : str | None, optional
        Root tag name to use when ``data`` does not already supply one.
        Defaults to :data:`DEFAULT_XML_ROOT`.

    Returns
    -------
    int
        The number of records written to the XML file.
    """
    if root_tag is None:
        # Previously a required keyword while DEFAULT_XML_ROOT sat unused;
        # fall back to the module default so callers may omit it.
        root_tag = DEFAULT_XML_ROOT

    if isinstance(data, dict) and len(data) == 1:
        # A single-key mapping supplies its own root element name.
        root_name, payload = next(iter(data.items()))
        root_element = _dict_to_element(str(root_name), payload)
    else:
        root_element = _dict_to_element(root_tag, data)

    tree = ET.ElementTree(root_element)
    tree.write(path, encoding='utf-8', xml_declaration=True)

    return count_records(data)
etlplus/file/yaml.py ADDED
@@ -0,0 +1,136 @@
1
+ """
2
+ :mod:`etlplus.file.yaml` module.
3
+
4
+ Helpers for reading/writing YAML files.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ from pathlib import Path
10
+ from typing import Any
11
+ from typing import cast
12
+
13
+ from ..types import JSONData
14
+ from ..types import JSONDict
15
+ from ..types import JSONList
16
+ from ..utils import count_records
17
+
18
+ # SECTION: EXPORTS ========================================================== #
19
+
20
+
21
+ __all__ = [
22
+ 'read',
23
+ 'write',
24
+ ]
25
+
26
+
27
+ # SECTION: INTERNAL CONSTANTS =============================================== #
28
+
29
+
30
+ # Optional YAML support (lazy-loaded to avoid hard dependency)
31
+ # Cached access function to avoid global statements.
32
+ _YAML_CACHE: dict[str, Any] = {}
33
+
34
+
35
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
36
+
37
+
38
+ def _get_yaml() -> Any:
39
+ """
40
+ Return the PyYAML module, importing it on first use.
41
+
42
+ Raises an informative ImportError if the optional dependency is missing.
43
+ """
44
+ mod = _YAML_CACHE.get('mod')
45
+ if mod is not None: # pragma: no cover - tiny branch
46
+ return mod
47
+ try:
48
+ _yaml_mod = __import__('yaml') # type: ignore[assignment]
49
+ except ImportError as e: # pragma: no cover
50
+ raise ImportError(
51
+ 'YAML support requires optional dependency "PyYAML".\n'
52
+ 'Install with: pip install PyYAML',
53
+ ) from e
54
+ _YAML_CACHE['mod'] = _yaml_mod
55
+
56
+ return _yaml_mod
57
+
58
+
59
def _require_yaml() -> None:
    """Fail fast with an informative error when PyYAML is unavailable."""
    # Importing (and caching) is the availability check itself.
    _get_yaml()
62
+
63
+
64
+ # SECTION: FUNCTIONS ======================================================== #
65
+
66
+
67
def read(
    path: Path,
) -> JSONData:
    """
    Read YAML content from ``path``.

    Validates that the YAML root is a dict or a list of dicts.

    Parameters
    ----------
    path : Path
        Path to the YAML file on disk.

    Returns
    -------
    JSONData
        The structured data read from the YAML file.

    Raises
    ------
    TypeError
        If the YAML root is not an object or an array of objects.
    """
    _require_yaml()

    with path.open('r', encoding='utf-8') as handle:
        content = _get_yaml().safe_load(handle)

    if isinstance(content, dict):
        return cast(JSONDict, content)
    if isinstance(content, list):
        if any(not isinstance(entry, dict) for entry in content):
            raise TypeError(
                'YAML array must contain only objects (dicts) when loading',
            )
        return cast(JSONList, content)
    raise TypeError(
        'YAML root must be an object or an array of objects when loading',
    )
106
+
107
+
108
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` as YAML to ``path`` and return record count.

    Parameters
    ----------
    path : Path
        Path to the YAML file on disk.
    data : JSONData
        Data to write as YAML.

    Returns
    -------
    int
        The number of records written.
    """
    # Resolving the module up front doubles as the availability check.
    yaml_mod = _get_yaml()
    with path.open('w', encoding='utf-8') as handle:
        yaml_mod.safe_dump(
            data,
            handle,
            sort_keys=False,
            allow_unicode=True,
            default_flow_style=False,
        )
    return count_records(data)
etlplus/file/zip.py ADDED
@@ -0,0 +1,175 @@
1
+ """
2
+ :mod:`etlplus.file.zip` module.
3
+
4
+ Helpers for reading/writing ZIP files.
5
+ """
6
+
7
+ from __future__ import annotations
8
+
9
+ import tempfile
10
+ import zipfile
11
+ from pathlib import Path
12
+
13
+ from ..types import JSONData
14
+ from ..types import JSONDict
15
+ from .enums import CompressionFormat
16
+ from .enums import FileFormat
17
+ from .enums import infer_file_format_and_compression
18
+
19
+ # SECTION: EXPORTS ========================================================== #
20
+
21
+
22
+ __all__ = [
23
+ 'read',
24
+ 'write',
25
+ ]
26
+
27
+
28
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
29
+
30
+
31
def _resolve_format(
    filename: str,
) -> FileFormat:
    """
    Resolve the inner file format from a filename.

    Parameters
    ----------
    filename : str
        The name of the file inside the ZIP archive.

    Returns
    -------
    FileFormat
        The inferred inner file format.

    Raises
    ------
    ValueError
        If the entry carries a non-ZIP compression scheme, or if the file
        format cannot be inferred from the filename.
    """
    fmt, compression = infer_file_format_and_compression(filename)
    if compression is not None and compression is not CompressionFormat.ZIP:
        # Bug fix: the original message was an f-string with no placeholder
        # ('... (unknown)'), discarding the offending entry and scheme.
        raise ValueError(
            f'Unexpected compression in archive for {filename!r}: '
            f'{compression}',
        )
    if fmt is None:
        raise ValueError(
            f'Cannot infer file format from compressed file {filename!r}',
        )
    return fmt
60
+
61
+
62
+ def _extract_payload(
63
+ entry: zipfile.ZipInfo,
64
+ archive: zipfile.ZipFile,
65
+ ) -> bytes:
66
+ """
67
+ Extract an archive entry into memory.
68
+
69
+ Parameters
70
+ ----------
71
+ entry : zipfile.ZipInfo
72
+ The ZIP archive entry.
73
+ archive : zipfile.ZipFile
74
+ The opened ZIP archive.
75
+
76
+ Returns
77
+ -------
78
+ bytes
79
+ The raw payload.
80
+ """
81
+ with archive.open(entry, 'r') as handle:
82
+ return handle.read()
83
+
84
+
85
+ # SECTION: FUNCTIONS ======================================================== #
86
+
87
+
88
def read(
    path: Path,
) -> JSONData:
    """
    Read ZIP content from ``path`` and parse the inner payload(s).

    A single-entry archive yields that entry's parsed payload directly; a
    multi-entry archive yields a mapping of entry name to parsed payload.

    Parameters
    ----------
    path : Path
        Path to the ZIP file on disk.

    Returns
    -------
    JSONData
        Parsed payload.

    Raises
    ------
    ValueError
        If the ZIP archive is empty.
    """
    with zipfile.ZipFile(path, 'r') as archive:
        entries = [entry for entry in archive.infolist() if not entry.is_dir()]
        if not entries:
            raise ValueError(f'ZIP archive is empty: {path}')

        # Both branches share the extract/parse logic via _parse_entry,
        # removing the duplication of the original implementation.
        if len(entries) == 1:
            return _parse_entry(entries[0], archive)

        results: JSONDict = {
            entry.filename: _parse_entry(entry, archive)
            for entry in entries
        }
        return results


def _parse_entry(
    entry: zipfile.ZipInfo,
    archive: zipfile.ZipFile,
) -> JSONData:
    """Extract one archive entry to a temp file and parse it by format."""
    # Imported lazily to avoid a circular import with ``.core``.
    from .core import File

    fmt = _resolve_format(entry.filename)
    payload = _extract_payload(entry, archive)
    with tempfile.TemporaryDirectory() as tmpdir:
        tmp_path = Path(tmpdir) / Path(entry.filename).name
        tmp_path.write_bytes(payload)
        return File(tmp_path, fmt).read()
136
+
137
+
138
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to ZIP at ``path`` and return record count.

    The inner payload is serialized to a temporary file whose format is
    inferred from the archive name (e.g. ``data.json.zip`` -> JSON), then
    stored as a single deflated entry.

    Parameters
    ----------
    path : Path
        Path to the ZIP file on disk.
    data : JSONData
        Data to write.

    Returns
    -------
    int
        Number of records written.
    """
    inner_format = _resolve_format(path.name)
    member_name = Path(path.name).with_suffix('').name

    with tempfile.TemporaryDirectory() as tmpdir:
        # Imported lazily to avoid a circular import with ``.core``.
        from .core import File

        staging = Path(tmpdir) / member_name
        written = File(staging, inner_format).write(data)
        raw = staging.read_bytes()

    path.parent.mkdir(parents=True, exist_ok=True)
    with zipfile.ZipFile(
        path,
        'w',
        compression=zipfile.ZIP_DEFLATED,
    ) as archive:
        archive.writestr(member_name, raw)

    return written
etlplus/load.py CHANGED
@@ -15,12 +15,9 @@ from typing import cast
15
15
  import requests # type: ignore[import]
16
16
 
17
17
  from .enums import DataConnectorType
18
- from .enums import FileFormat
19
18
  from .enums import HttpMethod
20
- from .enums import coerce_data_connector_type
21
- from .enums import coerce_file_format
22
- from .enums import coerce_http_method
23
19
  from .file import File
20
+ from .file import FileFormat
24
21
  from .types import JSONData
25
22
  from .types import JSONDict
26
23
  from .types import JSONList
@@ -101,7 +98,7 @@ def load_data(
101
98
  return cast(JSONData, source)
102
99
 
103
100
  if isinstance(source, Path):
104
- return File(source, FileFormat.JSON).read_json()
101
+ return File(source, FileFormat.JSON).read()
105
102
 
106
103
  if isinstance(source, str):
107
104
  # Special case: '-' means read JSON from STDIN (Unix convention).
@@ -111,7 +108,7 @@ def load_data(
111
108
  candidate = Path(source)
112
109
  if candidate.exists():
113
110
  try:
114
- return File(candidate, FileFormat.JSON).read_json()
111
+ return File(candidate, FileFormat.JSON).read()
115
112
  except (OSError, json.JSONDecodeError, ValueError):
116
113
  # Fall back to treating the string as raw JSON content.
117
114
  pass
@@ -155,9 +152,9 @@ def load_to_file(
155
152
  if file_format is None:
156
153
  records = File(path).write(data)
157
154
  ext = path.suffix.lstrip('.').lower()
158
- fmt = coerce_file_format(ext) if ext else FileFormat.JSON
155
+ fmt = FileFormat.coerce(ext) if ext else FileFormat.JSON
159
156
  else:
160
- fmt = coerce_file_format(file_format)
157
+ fmt = FileFormat.coerce(file_format)
161
158
  records = File(path, fmt).write(data)
162
159
  if fmt is FileFormat.CSV and records == 0:
163
160
  message = 'No data to write'
@@ -242,7 +239,7 @@ def load_to_api(
242
239
  TypeError
243
240
  If the session object is not valid.
244
241
  """
245
- http_method = coerce_http_method(method)
242
+ http_method = HttpMethod.coerce(method)
246
243
 
247
244
  # Apply a conservative timeout to guard against hanging requests.
248
245
  timeout = kwargs.pop('timeout', 10.0)
@@ -316,7 +313,7 @@ def load(
316
313
  """
317
314
  data = load_data(source)
318
315
 
319
- match coerce_data_connector_type(target_type):
316
+ match DataConnectorType.coerce(target_type):
320
317
  case DataConnectorType.FILE:
321
318
  # Prefer explicit format if provided, else infer from filename.
322
319
  return load_to_file(data, target, file_format)
@@ -331,6 +328,6 @@ def load(
331
328
  **kwargs,
332
329
  )
333
330
  case _:
334
- # `coerce_data_connector_type` covers invalid entries, but keep
335
- # explicit guard.
331
+ # :meth:`coerce` already raises for invalid connector types, but
332
+ # keep explicit guard for defensive programming.
336
333
  raise ValueError(f'Invalid target type: {target_type}')