etlplus 0.5.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. etlplus/__init__.py +43 -0
  2. etlplus/__main__.py +22 -0
  3. etlplus/__version__.py +14 -0
  4. etlplus/api/README.md +237 -0
  5. etlplus/api/__init__.py +136 -0
  6. etlplus/api/auth.py +432 -0
  7. etlplus/api/config.py +633 -0
  8. etlplus/api/endpoint_client.py +885 -0
  9. etlplus/api/errors.py +170 -0
  10. etlplus/api/pagination/__init__.py +47 -0
  11. etlplus/api/pagination/client.py +188 -0
  12. etlplus/api/pagination/config.py +440 -0
  13. etlplus/api/pagination/paginator.py +775 -0
  14. etlplus/api/rate_limiting/__init__.py +38 -0
  15. etlplus/api/rate_limiting/config.py +343 -0
  16. etlplus/api/rate_limiting/rate_limiter.py +266 -0
  17. etlplus/api/request_manager.py +589 -0
  18. etlplus/api/retry_manager.py +430 -0
  19. etlplus/api/transport.py +325 -0
  20. etlplus/api/types.py +172 -0
  21. etlplus/cli/__init__.py +15 -0
  22. etlplus/cli/app.py +1367 -0
  23. etlplus/cli/handlers.py +775 -0
  24. etlplus/cli/main.py +616 -0
  25. etlplus/config/__init__.py +56 -0
  26. etlplus/config/connector.py +372 -0
  27. etlplus/config/jobs.py +311 -0
  28. etlplus/config/pipeline.py +339 -0
  29. etlplus/config/profile.py +78 -0
  30. etlplus/config/types.py +204 -0
  31. etlplus/config/utils.py +120 -0
  32. etlplus/ddl.py +197 -0
  33. etlplus/enums.py +414 -0
  34. etlplus/extract.py +218 -0
  35. etlplus/file.py +657 -0
  36. etlplus/load.py +336 -0
  37. etlplus/mixins.py +62 -0
  38. etlplus/py.typed +0 -0
  39. etlplus/run.py +368 -0
  40. etlplus/run_helpers.py +843 -0
  41. etlplus/templates/__init__.py +5 -0
  42. etlplus/templates/ddl.sql.j2 +128 -0
  43. etlplus/templates/view.sql.j2 +69 -0
  44. etlplus/transform.py +1049 -0
  45. etlplus/types.py +227 -0
  46. etlplus/utils.py +638 -0
  47. etlplus/validate.py +493 -0
  48. etlplus/validation/__init__.py +44 -0
  49. etlplus/validation/utils.py +389 -0
  50. etlplus-0.5.4.dist-info/METADATA +616 -0
  51. etlplus-0.5.4.dist-info/RECORD +55 -0
  52. etlplus-0.5.4.dist-info/WHEEL +5 -0
  53. etlplus-0.5.4.dist-info/entry_points.txt +2 -0
  54. etlplus-0.5.4.dist-info/licenses/LICENSE +21 -0
  55. etlplus-0.5.4.dist-info/top_level.txt +1 -0
etlplus/file.py ADDED
@@ -0,0 +1,657 @@
1
+ """
2
+ :mod:`etlplus.file` module.
3
+
4
+ Shared helpers for reading and writing structured and semi-structured data
5
+ files.
6
+ """
7
+
8
+ from __future__ import annotations
9
+
10
+ import csv
11
+ import json
12
+ import xml.etree.ElementTree as ET
13
+ from dataclasses import dataclass
14
+ from pathlib import Path
15
+ from typing import Any
16
+ from typing import cast
17
+
18
+ from .enums import FileFormat
19
+ from .types import JSONData
20
+ from .types import JSONDict
21
+ from .types import JSONList
22
+ from .types import StrPath
23
+ from .utils import count_records
24
+
25
# SECTION: EXPORTS ========================================================== #


# Public API of this module; everything else is an internal helper.
__all__ = ['File']


# SECTION: INTERNAL CONSTANTS ============================================== #


# Root tag used when writing XML without an explicit single-key wrapper.
_DEFAULT_XML_ROOT = 'root'

# Map common filename extensions to FileFormat (used for inference)
_EXT_TO_FORMAT: dict[str, FileFormat] = {
    'csv': FileFormat.CSV,
    'json': FileFormat.JSON,
    'xml': FileFormat.XML,
    'yaml': FileFormat.YAML,
    'yml': FileFormat.YAML,
}

# Optional YAML support (lazy-loaded to avoid hard dependency)
# Cached access function to avoid global statements.
# _get_yaml() stores the imported PyYAML module under the key 'mod'.
_YAML_CACHE: dict[str, Any] = {}
48
+
49
+
50
+ # SECTION: INTERNAL FUNCTIONS ============================================== #
51
+
52
+
53
+ def _dict_to_element(
54
+ name: str,
55
+ payload: Any,
56
+ ) -> ET.Element:
57
+ """
58
+ Convert a dictionary-like payload into an XML element.
59
+
60
+ Parameters
61
+ ----------
62
+ name : str
63
+ Name of the XML element.
64
+ payload : Any
65
+ The data to include in the XML element.
66
+
67
+ Returns
68
+ -------
69
+ ET.Element
70
+ The constructed XML element.
71
+ """
72
+ element = ET.Element(name)
73
+
74
+ if isinstance(payload, dict):
75
+ text = payload.get('text')
76
+ if text is not None:
77
+ element.text = str(text)
78
+
79
+ for key, value in payload.items():
80
+ if key == 'text':
81
+ continue
82
+ if key.startswith('@'):
83
+ element.set(key[1:], str(value))
84
+ continue
85
+ if isinstance(value, list):
86
+ for item in value:
87
+ element.append(_dict_to_element(key, item))
88
+ else:
89
+ element.append(_dict_to_element(key, value))
90
+ elif isinstance(payload, list):
91
+ for item in payload:
92
+ element.append(_dict_to_element('item', item))
93
+ elif payload is not None:
94
+ element.text = str(payload)
95
+
96
+ return element
97
+
98
+
99
+ def _element_to_dict(
100
+ element: ET.Element,
101
+ ) -> JSONDict:
102
+ """
103
+ Convert an XML element into a nested dictionary.
104
+
105
+ Parameters
106
+ ----------
107
+ element : ET.Element
108
+ XML element to convert.
109
+
110
+ Returns
111
+ -------
112
+ JSONDict
113
+ Nested dictionary representation of the XML element.
114
+ """
115
+ result: JSONDict = {}
116
+ text = (element.text or '').strip()
117
+ if text:
118
+ result['text'] = text
119
+
120
+ for child in element:
121
+ child_data = _element_to_dict(child)
122
+ tag = child.tag
123
+ if tag in result:
124
+ existing = result[tag]
125
+ if isinstance(existing, list):
126
+ existing.append(child_data)
127
+ else:
128
+ result[tag] = [existing, child_data]
129
+ else:
130
+ result[tag] = child_data
131
+
132
+ for key, value in element.attrib.items():
133
+ if key in result:
134
+ result[f'@{key}'] = value
135
+ else:
136
+ result[key] = value
137
+ return result
138
+
139
+
140
def _get_yaml() -> Any:
    """
    Return the PyYAML module, importing it on first use.

    The module is cached in ``_YAML_CACHE`` under the key ``'mod'`` so the
    import cost is paid once per process.

    Returns
    -------
    Any
        The imported :mod:`yaml` module.

    Raises
    ------
    ImportError
        If the optional dependency PyYAML is not installed.
    """
    cached = _YAML_CACHE.get('mod')
    if cached is not None:  # pragma: no cover - tiny branch
        return cached
    try:
        # Lazy import keeps PyYAML an optional dependency; a plain import
        # statement is clearer than the previous __import__('yaml') call.
        import yaml
    except ImportError as e:  # pragma: no cover
        raise ImportError(
            'YAML support requires optional dependency "PyYAML".\n'
            'Install with: pip install PyYAML',
        ) from e
    _YAML_CACHE['mod'] = yaml

    return yaml
159
+
160
+
161
+ # SECTION: CLASS ============================================================ #
162
+
163
+
164
+ @dataclass(slots=True)
165
+ class File:
166
+ """
167
+ Convenience wrapper around structured file IO.
168
+
169
+ This class encapsulates the one-off helpers in this module as convenient
170
+ instance methods while retaining the original function API for
171
+ backward compatibility (those functions delegate to this class).
172
+
173
+ Attributes
174
+ ----------
175
+ path : Path
176
+ Path to the file on disk.
177
+ file_format : FileFormat | None, optional
178
+ Explicit format. If omitted, the format is inferred from the file
179
+ extension (``.csv``, ``.json``, or ``.xml``).
180
+ """
181
+
182
+ # -- Attributes -- #
183
+
184
+ path: Path
185
+ file_format: FileFormat | None = None
186
+
187
+ # -- Magic Methods (Object Lifecycle) -- #
188
+
189
+ def __post_init__(self) -> None:
190
+ """
191
+ Auto-detect and set the file format on initialization.
192
+
193
+ If no explicit ``file_format`` is provided, attempt to infer it from
194
+ the file path's extension and update :attr:`file_format`. If the
195
+ extension is unknown, the attribute is left as ``None`` and will be
196
+ validated later by :meth:`_ensure_format`.
197
+ """
198
+ # Normalize incoming path (allow str in constructor) to Path.
199
+ if isinstance(self.path, str):
200
+ self.path = Path(self.path)
201
+
202
+ if self.file_format is None:
203
+ try:
204
+ self.file_format = self._guess_format()
205
+ except ValueError:
206
+ # Leave as None; _ensure_format() will raise on use if needed.
207
+ pass
208
+
209
+ # -- Internal Instance Methods -- #
210
+
211
+ def _assert_exists(self) -> None:
212
+ """
213
+ Raise FileNotFoundError if :attr:`path` does not exist.
214
+
215
+ This centralizes existence checks across multiple read methods.
216
+ """
217
+ if not self.path.exists():
218
+ raise FileNotFoundError(f'File not found: {self.path}')
219
+
220
+ def _ensure_format(self) -> FileFormat:
221
+ """
222
+ Resolve the active format, guessing from extension if needed.
223
+
224
+ Returns
225
+ -------
226
+ FileFormat
227
+ The resolved file format.
228
+ """
229
+ return (
230
+ self.file_format
231
+ if self.file_format is not None
232
+ else self._guess_format()
233
+ )
234
+
235
+ def _guess_format(self) -> FileFormat:
236
+ """
237
+ Infer the file format from the filename extension.
238
+
239
+ Returns
240
+ -------
241
+ FileFormat
242
+ The inferred file format based on the file extension.
243
+
244
+ Raises
245
+ ------
246
+ ValueError
247
+ If the extension is unknown or unsupported.
248
+ """
249
+ ext = self.path.suffix.lstrip('.').casefold()
250
+ try:
251
+ return _EXT_TO_FORMAT[ext]
252
+ except KeyError as e:
253
+ raise ValueError(
254
+ 'Cannot infer file format from '
255
+ f'extension {self.path.suffix!r}',
256
+ ) from e
257
+
258
+ # -- Instance Methods (Generic API) -- #
259
+
260
+ def read(self) -> JSONData:
261
+ """
262
+ Read structured data from :attr:`path` using :attr:`file_format`.
263
+
264
+ Returns
265
+ -------
266
+ JSONData
267
+ The structured data read from the file.
268
+
269
+ Raises
270
+ ------
271
+ ValueError
272
+ If the resolved file format is unsupported.
273
+ """
274
+ fmt = self._ensure_format()
275
+ match fmt:
276
+ case FileFormat.JSON:
277
+ return self.read_json()
278
+ case FileFormat.CSV:
279
+ return self.read_csv()
280
+ case FileFormat.XML:
281
+ return self.read_xml()
282
+ case FileFormat.YAML:
283
+ return self.read_yaml()
284
+ raise ValueError(f'Unsupported format: {fmt}')
285
+
286
+ def write(
287
+ self,
288
+ data: JSONData,
289
+ *,
290
+ root_tag: str = _DEFAULT_XML_ROOT,
291
+ ) -> int:
292
+ """
293
+ Write ``data`` to :attr:`path` using :attr:`file_format`.
294
+
295
+ Parameters
296
+ ----------
297
+ data : JSONData
298
+ Data to write to the file.
299
+ root_tag : str, optional
300
+ Root tag name to use when writing XML files. Defaults to
301
+ ``'root'``.
302
+
303
+ Returns
304
+ -------
305
+ int
306
+ The number of records written.
307
+
308
+ Raises
309
+ ------
310
+ ValueError
311
+ If the resolved file format is unsupported.
312
+ """
313
+ fmt = self._ensure_format()
314
+ match fmt:
315
+ case FileFormat.JSON:
316
+ return self.write_json(data)
317
+ case FileFormat.CSV:
318
+ return self.write_csv(data)
319
+ case FileFormat.XML:
320
+ return self.write_xml(data, root_tag=root_tag)
321
+ case FileFormat.YAML:
322
+ return self.write_yaml(data)
323
+ raise ValueError(f'Unsupported format: {fmt}')
324
+
325
+ # -- Instance Methods (CSV) -- #
326
+
327
+ def read_csv(self) -> JSONList:
328
+ """
329
+ Load CSV content as a list of dictionaries from :attr:`path`.
330
+
331
+ Returns
332
+ -------
333
+ JSONList
334
+ The list of dictionaries read from the CSV file.
335
+ """
336
+ self._assert_exists()
337
+
338
+ with self.path.open('r', encoding='utf-8', newline='') as handle:
339
+ reader: csv.DictReader[str] = csv.DictReader(handle)
340
+ rows: JSONList = []
341
+ for row in reader:
342
+ if not any(row.values()):
343
+ continue
344
+ rows.append(cast(JSONDict, dict(row)))
345
+ return rows
346
+
347
+ def write_csv(
348
+ self,
349
+ data: JSONData,
350
+ ) -> int:
351
+ """
352
+ Write CSV rows to :attr:`path` and return the number of rows.
353
+
354
+ Parameters
355
+ ----------
356
+ data : JSONData
357
+ Data to write as CSV. Should be a list of dictionaries or a
358
+ single dictionary.
359
+
360
+ Returns
361
+ -------
362
+ int
363
+ The number of rows written to the CSV file.
364
+ """
365
+ rows: list[JSONDict]
366
+ if isinstance(data, list):
367
+ rows = [row for row in data if isinstance(row, dict)]
368
+ else:
369
+ rows = [data]
370
+
371
+ if not rows:
372
+ return 0
373
+
374
+ fieldnames = sorted({key for row in rows for key in row})
375
+ with self.path.open('w', encoding='utf-8', newline='') as handle:
376
+ writer = csv.DictWriter(handle, fieldnames=fieldnames)
377
+ writer.writeheader()
378
+ for row in rows:
379
+ writer.writerow(
380
+ {field: row.get(field) for field in fieldnames},
381
+ )
382
+
383
+ return len(rows)
384
+
385
+ # -- Instance Methods (JSON) -- #
386
+
387
+ def read_json(self) -> JSONData:
388
+ """
389
+ Load and validate JSON payloads from :attr:`path`.
390
+
391
+ Returns
392
+ -------
393
+ JSONData
394
+ The structured data read from the JSON file.
395
+
396
+ Raises
397
+ ------
398
+ TypeError
399
+ If the JSON root is not an object or an array of objects.
400
+ """
401
+ self._assert_exists()
402
+
403
+ with self.path.open('r', encoding='utf-8') as handle:
404
+ loaded = json.load(handle)
405
+
406
+ if isinstance(loaded, dict):
407
+ return cast(JSONDict, loaded)
408
+ if isinstance(loaded, list):
409
+ if all(isinstance(item, dict) for item in loaded):
410
+ return cast(JSONList, loaded)
411
+ raise TypeError(
412
+ 'JSON array must contain only objects (dicts) '
413
+ 'when loading file',
414
+ )
415
+ raise TypeError(
416
+ 'JSON root must be an object or an array of objects '
417
+ 'when loading file',
418
+ )
419
+
420
+ def write_json(
421
+ self,
422
+ data: JSONData,
423
+ ) -> int:
424
+ """
425
+ Write ``data`` as formatted JSON to :attr:`path`.
426
+
427
+ Parameters
428
+ ----------
429
+ data : JSONData
430
+ Data to serialize as JSON.
431
+
432
+ Returns
433
+ -------
434
+ int
435
+ The number of records written to the JSON file.
436
+ """
437
+ self.path.parent.mkdir(parents=True, exist_ok=True)
438
+ with self.path.open('w', encoding='utf-8') as handle:
439
+ json.dump(
440
+ data,
441
+ handle,
442
+ indent=2,
443
+ ensure_ascii=False,
444
+ )
445
+ handle.write('\n')
446
+
447
+ return count_records(data)
448
+
449
+ # -- Instance Methods (XML) -- #
450
+
451
+ def read_xml(self) -> JSONDict:
452
+ """
453
+ Parse XML document at :attr:`path` into a nested dictionary.
454
+
455
+ Returns
456
+ -------
457
+ JSONDict
458
+ Nested dictionary representation of the XML file.
459
+ """
460
+ self._assert_exists()
461
+
462
+ tree = ET.parse(self.path)
463
+ root = tree.getroot()
464
+
465
+ return {root.tag: _element_to_dict(root)}
466
+
467
+ # -- Instance Methods (YAML) -- #
468
+
469
+ def _require_yaml(self) -> None:
470
+ """Ensure PyYAML is available or raise an informative error."""
471
+ _get_yaml()
472
+
473
+ def read_yaml(self) -> JSONData:
474
+ """
475
+ Load and validate YAML payloads from :attr:`path`.
476
+
477
+ Returns
478
+ -------
479
+ JSONData
480
+ The structured data read from the YAML file.
481
+
482
+ Raises
483
+ ------
484
+ TypeError
485
+ If the YAML root is not an object or an array of objects.
486
+ """
487
+ self._require_yaml()
488
+ self._assert_exists()
489
+
490
+ with self.path.open('r', encoding='utf-8') as handle:
491
+ loaded = _get_yaml().safe_load(handle)
492
+
493
+ if isinstance(loaded, dict):
494
+ return cast(JSONDict, loaded)
495
+ if isinstance(loaded, list):
496
+ if all(isinstance(item, dict) for item in loaded):
497
+ return cast(JSONList, loaded)
498
+ raise TypeError(
499
+ 'YAML array must contain only objects (dicts) when loading',
500
+ )
501
+ raise TypeError(
502
+ 'YAML root must be an object or an array of objects when loading',
503
+ )
504
+
505
+ def write_xml(
506
+ self,
507
+ data: JSONData,
508
+ *,
509
+ root_tag: str = _DEFAULT_XML_ROOT,
510
+ ) -> int:
511
+ """
512
+ Write ``data`` as XML to :attr:`path` and return record count.
513
+
514
+ Parameters
515
+ ----------
516
+ data : JSONData
517
+ Data to write as XML.
518
+ root_tag : str, optional
519
+ Root tag name to use when writing XML files. Defaults to
520
+ ``'root'``.
521
+
522
+ Returns
523
+ -------
524
+ int
525
+ The number of records written to the XML file.
526
+ """
527
+ if isinstance(data, dict) and len(data) == 1:
528
+ root_name, payload = next(iter(data.items()))
529
+ root_element = _dict_to_element(str(root_name), payload)
530
+ else:
531
+ root_element = _dict_to_element(root_tag, data)
532
+
533
+ tree = ET.ElementTree(root_element)
534
+ tree.write(self.path, encoding='utf-8', xml_declaration=True)
535
+
536
+ return count_records(data)
537
+
538
+ def write_yaml(
539
+ self,
540
+ data: JSONData,
541
+ ) -> int:
542
+ """
543
+ Write ``data`` as YAML to :attr:`path` and return record count.
544
+
545
+ Parameters
546
+ ----------
547
+ data : JSONData
548
+ Data to write as YAML.
549
+
550
+ Returns
551
+ -------
552
+ int
553
+ The number of records written.
554
+ """
555
+ self._require_yaml()
556
+ with self.path.open('w', encoding='utf-8') as handle:
557
+ _get_yaml().safe_dump(
558
+ data,
559
+ handle,
560
+ sort_keys=False,
561
+ allow_unicode=True,
562
+ default_flow_style=False,
563
+ )
564
+ return count_records(data)
565
+
566
+ # -- Class Methods -- #
567
+
568
+ @classmethod
569
+ def from_path(
570
+ cls,
571
+ path: StrPath,
572
+ *,
573
+ file_format: FileFormat | str | None = None,
574
+ ) -> File:
575
+ """
576
+ Create a :class:`File` from any path-like and optional format.
577
+
578
+ Parameters
579
+ ----------
580
+ path : StrPath
581
+ Path to the file on disk.
582
+ file_format : FileFormat | str | None, optional
583
+ Explicit format. If omitted, the format is inferred from the file
584
+ extension (``.csv``, ``.json``, or ``.xml``).
585
+
586
+ Returns
587
+ -------
588
+ File
589
+ The constructed :class:`File` instance.
590
+ """
591
+ resolved = Path(path)
592
+ ff: FileFormat | None
593
+ if isinstance(file_format, str):
594
+ ff = FileFormat.coerce(file_format)
595
+ else:
596
+ ff = file_format
597
+
598
+ return cls(resolved, ff)
599
+
600
+ @classmethod
601
+ def read_file(
602
+ cls,
603
+ path: StrPath,
604
+ file_format: FileFormat | str | None = None,
605
+ ) -> JSONData:
606
+ """
607
+ Read structured data.
608
+
609
+ Parameters
610
+ ----------
611
+ path : StrPath
612
+ Path to the file on disk.
613
+ file_format : FileFormat | str | None, optional
614
+ Explicit format. If omitted, the format is inferred from the file
615
+ extension (``.csv``, ``.json``, or ``.xml``).
616
+
617
+ Returns
618
+ -------
619
+ JSONData
620
+ The structured data read from the file.
621
+ """
622
+ return cls.from_path(path, file_format=file_format).read()
623
+
624
+ @classmethod
625
+ def write_file(
626
+ cls,
627
+ path: StrPath,
628
+ data: JSONData,
629
+ file_format: FileFormat | str | None = None,
630
+ *,
631
+ root_tag: str = _DEFAULT_XML_ROOT,
632
+ ) -> int:
633
+ """
634
+ Write structured data and count written records.
635
+
636
+ Parameters
637
+ ----------
638
+ path : StrPath
639
+ Path to the file on disk.
640
+ data : JSONData
641
+ Data to write to the file.
642
+ file_format : FileFormat | str | None, optional
643
+ Explicit format. If omitted, the format is inferred from the file
644
+ extension (``.csv``, ``.json``, or ``.xml``).
645
+ root_tag : str, optional
646
+ Root tag name to use when writing XML files. Defaults to
647
+ ``'root'``.
648
+
649
+ Returns
650
+ -------
651
+ int
652
+ The number of records written to the file.
653
+ """
654
+ return cls.from_path(path, file_format=file_format).write(
655
+ data,
656
+ root_tag=root_tag,
657
+ )