etlplus 0.11.11__py3-none-any.whl → 0.12.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
etlplus/file/feather.py CHANGED
@@ -1,48 +1,119 @@
  """
  :mod:`etlplus.file.feather` module.

- Stub helpers for FEATHER read/write.
+ Helpers for reading/writing Feather files.
  """

  from __future__ import annotations

  from pathlib import Path
+ from typing import Any
+ from typing import cast

  from ..types import JSONData
+ from ..types import JSONDict
+ from ..types import JSONList

  # SECTION: EXPORTS ========================================================== #


- def read(path: Path) -> JSONData:
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: INTERNAL CONSTANTS =============================================== #
+
+
+ _PANDAS_CACHE: dict[str, Any] = {}
+
+
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+ def _get_pandas() -> Any:
+     """
+     Return the pandas module, importing it on first use.
+
+     Raises an informative ImportError if the optional dependency is missing.
+     """
+     mod = _PANDAS_CACHE.get('mod')
+     if mod is not None:  # pragma: no cover - tiny branch
+         return mod
+     try:
+         _pd = __import__('pandas')  # type: ignore[assignment]
+     except ImportError as e:  # pragma: no cover
+         raise ImportError(
+             'Feather support requires optional dependency "pandas".\n'
+             'Install with: pip install pandas',
+         ) from e
+     _PANDAS_CACHE['mod'] = _pd
+
+     return _pd
+
+
+ def _normalize_records(data: JSONData) -> JSONList:
      """
-     Read FEATHER content from ``path``.
+     Normalize JSON payloads into a list of dictionaries.
+
+     Raises TypeError when payloads contain non-dict items.
+     """
+     if isinstance(data, list):
+         if not all(isinstance(item, dict) for item in data):
+             raise TypeError(
+                 'Feather payloads must contain only objects (dicts)',
+             )
+         return cast(JSONList, data)
+     return [cast(JSONDict, data)]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read Feather content from ``path``.

      Parameters
      ----------
      path : Path
-         Path to the FEATHER file on disk.
+         Path to the Feather file on disk.

      Returns
      -------
-     JSONData
-         Parsed payload.
+     JSONList
+         The list of dictionaries read from the Feather file.

      Raises
      ------
-     NotImplementedError
-         FEATHER :func:`read` is not implemented yet.
+     ImportError
+         When optional dependency "pyarrow" is missing.
      """
-     raise NotImplementedError('FEATHER read is not implemented yet')
+     pandas = _get_pandas()
+     try:
+         frame = pandas.read_feather(path)
+     except ImportError as e:  # pragma: no cover
+         raise ImportError(
+             'Feather support requires optional dependency "pyarrow".\n'
+             'Install with: pip install pyarrow',
+         ) from e
+     return cast(JSONList, frame.to_dict(orient='records'))


- def write(path: Path, data: JSONData) -> int:
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
      """
-     Write ``data`` to FEATHER at ``path``.
+     Write ``data`` to Feather at ``path`` and return record count.

      Parameters
      ----------
      path : Path
-         Path to the FEATHER file on disk.
+         Path to the Feather file on disk.
      data : JSONData
          Data to write.

@@ -53,7 +124,21 @@ def write(path: Path, data: JSONData) -> int:

      Raises
      ------
-     NotImplementedError
-         FEATHER :func:`write` is not implemented yet.
+     ImportError
+         When optional dependency "pyarrow" is missing.
      """
-     raise NotImplementedError('FEATHER write is not implemented yet')
+     records = _normalize_records(data)
+     if not records:
+         return 0
+
+     pandas = _get_pandas()
+     path.parent.mkdir(parents=True, exist_ok=True)
+     frame = pandas.DataFrame.from_records(records)
+     try:
+         frame.to_feather(path)
+     except ImportError as e:  # pragma: no cover
+         raise ImportError(
+             'Feather support requires optional dependency "pyarrow".\n'
+             'Install with: pip install pyarrow',
+         ) from e
+     return len(records)
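A minimal usage sketch for the new Feather helpers (the path and records are hypothetical; pandas and pyarrow must be installed as optional extras):

    from pathlib import Path

    from etlplus.file import feather

    # Round trip: write a list of records, then read it back.
    count = feather.write(Path('out/events.feather'), [{'id': 1, 'name': 'a'}])
    rows = feather.read(Path('out/events.feather'))
    assert count == len(rows) == 1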
etlplus/file/gz.py CHANGED
@@ -1,21 +1,37 @@
  """
  :mod:`etlplus.file.gz` module.

- Stub helpers for GZ read/write.
+ Helpers for reading/writing GZ files.
  """

  from __future__ import annotations

+ import gzip
+ import tempfile
  from pathlib import Path

  from ..types import JSONData
+ from .enums import CompressionFormat
+ from .enums import FileFormat
+ from .enums import infer_file_format_and_compression

  # SECTION: EXPORTS ========================================================== #


- def read(path: Path) -> JSONData:
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+ def _resolve_format(
+     path: Path,
+ ) -> FileFormat:
      """
-     Read GZ content from ``path``.
+     Resolve the inner file format from a .gz filename.

      Parameters
      ----------
@@ -24,20 +40,61 @@ def read(path: Path) -> JSONData:

      Returns
      -------
-     JSONData
-         Parsed payload.
+     FileFormat
+         The inferred inner file format.

      Raises
      ------
-     NotImplementedError
-         GZ :func:`read` is not implemented yet.
+     ValueError
+         If the file format cannot be inferred from the filename.
+     """
+     fmt, compression = infer_file_format_and_compression(path)
+     if compression is not CompressionFormat.GZ:
+         raise ValueError(f'Not a gzip file: {path}')
+     if fmt is None:
+         raise ValueError(
+             f'Cannot infer file format from compressed file {path!r}',
+         )
+     return fmt
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONData:
+     """
+     Read GZ content from ``path`` and parse the inner payload.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the GZ file on disk.
+
+     Returns
+     -------
+     JSONData
+         Parsed payload.
      """
-     raise NotImplementedError('GZ read is not implemented yet')
+     fmt = _resolve_format(path)
+     with gzip.open(path, 'rb') as handle:
+         payload = handle.read()
+
+     with tempfile.TemporaryDirectory() as tmpdir:
+         tmp_path = Path(tmpdir) / f'payload.{fmt.value}'
+         tmp_path.write_bytes(payload)
+         from .core import File

+         return File(tmp_path, fmt).read()

- def write(path: Path, data: JSONData) -> int:
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
      """
-     Write ``data`` to GZ at ``path``.
+     Write ``data`` to GZ at ``path`` and return record count.

      Parameters
      ----------
@@ -50,10 +107,17 @@ def write(path: Path, data: JSONData) -> int:
      -------
      int
          Number of records written.
-
-     Raises
-     ------
-     NotImplementedError
-         GZ :func:`write` is not implemented yet.
      """
-     raise NotImplementedError('GZ write is not implemented yet')
+     fmt = _resolve_format(path)
+     with tempfile.TemporaryDirectory() as tmpdir:
+         tmp_path = Path(tmpdir) / f'payload.{fmt.value}'
+         from .core import File
+
+         count = File(tmp_path, fmt).write(data)
+         payload = tmp_path.read_bytes()
+
+     path.parent.mkdir(parents=True, exist_ok=True)
+     with gzip.open(path, 'wb') as handle:
+         handle.write(payload)
+
+     return count
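A sketch of how the new GZ helpers round-trip a payload. The inner format is resolved from the double extension, so the hypothetical path below assumes that infer_file_format_and_compression maps '.json.gz' to the JSON format:

    from pathlib import Path

    from etlplus.file import gz

    records = [{'id': 1}, {'id': 2}]
    # write() serializes via the inferred inner format (JSON here), then gzips;
    # read() gunzips and parses with the same inferred format.
    gz.write(Path('out/records.json.gz'), records)
    assert gz.read(Path('out/records.json.gz')) == records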
etlplus/file/json.py CHANGED
@@ -1,7 +1,7 @@
  """
  :mod:`etlplus.file.json` module.

- JSON read/write helpers.
+ Helpers for reading/writing JSON files.
  """

  from __future__ import annotations
@@ -15,6 +15,15 @@ from ..types import JSONDict
  from ..types import JSONList
  from ..utils import count_records

+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
  # SECTION: FUNCTIONS ======================================================== #


@@ -22,7 +31,9 @@ def read(
      path: Path,
  ) -> JSONData:
      """
-     Load and validate JSON payloads from ``path``.
+     Read JSON content from ``path``.
+
+     Validates that the JSON root is a dict or a list of dicts.

      Parameters
      ----------
etlplus/file/ndjson.py CHANGED
@@ -1,19 +1,35 @@
  """
  :mod:`etlplus.file.ndjson` module.

- Stub helpers for NDJSON read/write.
+ Helpers for reading/writing NDJSON files.
  """

  from __future__ import annotations

+ import json
  from pathlib import Path
+ from typing import cast

  from ..types import JSONData
+ from ..types import JSONDict
+ from ..types import JSONList
+ from ..utils import count_records

  # SECTION: EXPORTS ========================================================== #


- def read(path: Path) -> JSONData:
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
      """
      Read NDJSON content from ``path``.

@@ -24,18 +40,33 @@ def read(path: Path) -> JSONData:

      Returns
      -------
-     JSONData
-         Parsed payload.
+     JSONList
+         The list of dictionaries read from the NDJSON file.

      Raises
      ------
-     NotImplementedError
-         NDJSON :func:`read` is not implemented yet.
+     TypeError
+         If any line in the NDJSON file is not a JSON object (dict).
      """
-     raise NotImplementedError('NDJSON read is not implemented yet')
+     rows: JSONList = []
+     with path.open('r', encoding='utf-8') as handle:
+         for idx, line in enumerate(handle, start=1):
+             text = line.strip()
+             if not text:
+                 continue
+             payload = json.loads(text)
+             if not isinstance(payload, dict):
+                 raise TypeError(
+                     f'NDJSON lines must be objects (dicts) (line {idx})',
+                 )
+             rows.append(cast(JSONDict, payload))
+     return rows


- def write(path: Path, data: JSONData) -> int:
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
      """
      Write ``data`` to NDJSON at ``path``.

@@ -53,7 +84,26 @@ def write(path: Path, data: JSONData) -> int:

      Raises
      ------
-     NotImplementedError
-         NDJSON :func:`write` is not implemented yet.
+     TypeError
+         If ``data`` is a list containing non-dict items.
      """
-     raise NotImplementedError('NDJSON write is not implemented yet')
+     rows: JSONList
+     if isinstance(data, list):
+         if not all(isinstance(item, dict) for item in data):
+             raise TypeError(
+                 'NDJSON payloads must contain only objects (dicts)',
+             )
+         rows = cast(JSONList, data)
+     else:
+         rows = [cast(JSONDict, data)]
+
+     if not rows:
+         return 0
+
+     path.parent.mkdir(parents=True, exist_ok=True)
+     with path.open('w', encoding='utf-8') as handle:
+         for row in rows:
+             handle.write(json.dumps(row, ensure_ascii=False))
+             handle.write('\n')
+
+     return count_records(rows)
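A minimal sketch of the new NDJSON helpers (hypothetical path and records). Each record is written as one JSON object per line, and a single dict payload is normalized to a one-element list:

    from pathlib import Path

    from etlplus.file import ndjson

    ndjson.write(Path('out/log.ndjson'), [{'level': 'info'}, {'level': 'warn'}])
    print(ndjson.read(Path('out/log.ndjson')))
    # [{'level': 'info'}, {'level': 'warn'}]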
etlplus/file/orc.py CHANGED
@@ -1,19 +1,77 @@
  """
  :mod:`etlplus.file.orc` module.

- Stub helpers for ORC read/write.
+ Helpers for reading/writing ORC files.
  """

  from __future__ import annotations

  from pathlib import Path
+ from typing import Any
+ from typing import cast

  from ..types import JSONData
+ from ..types import JSONDict
+ from ..types import JSONList

  # SECTION: EXPORTS ========================================================== #


- def read(path: Path) -> JSONData:
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: INTERNAL CONSTANTS =============================================== #
+
+
+ _PANDAS_CACHE: dict[str, Any] = {}
+
+
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+ def _get_pandas() -> Any:
+     """
+     Return the pandas module, importing it on first use.
+
+     Raises an informative ImportError if the optional dependency is missing.
+     """
+     mod = _PANDAS_CACHE.get('mod')
+     if mod is not None:  # pragma: no cover - tiny branch
+         return mod
+     try:
+         _pd = __import__('pandas')  # type: ignore[assignment]
+     except ImportError as e:  # pragma: no cover
+         raise ImportError(
+             'ORC support requires optional dependency "pandas".\n'
+             'Install with: pip install pandas',
+         ) from e
+     _PANDAS_CACHE['mod'] = _pd
+
+     return _pd
+
+
+ def _normalize_records(data: JSONData) -> JSONList:
+     """
+     Normalize JSON payloads into a list of dictionaries.
+
+     Raises TypeError when payloads contain non-dict items.
+     """
+     if isinstance(data, list):
+         if not all(isinstance(item, dict) for item in data):
+             raise TypeError('ORC payloads must contain only objects (dicts)')
+         return cast(JSONList, data)
+     return [cast(JSONDict, data)]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
      """
      Read ORC content from ``path``.

@@ -24,20 +82,31 @@ def read(path: Path) -> JSONData:

      Returns
      -------
-     JSONData
-         Parsed payload.
+     JSONList
+         The list of dictionaries read from the ORC file.

      Raises
      ------
-     NotImplementedError
-         ORC :func:`read` is not implemented yet.
+     ImportError
+         When optional dependency "pyarrow" is missing.
      """
-     raise NotImplementedError('ORC read is not implemented yet')
+     pandas = _get_pandas()
+     try:
+         frame = pandas.read_orc(path)
+     except ImportError as e:  # pragma: no cover
+         raise ImportError(
+             'ORC support requires optional dependency "pyarrow".\n'
+             'Install with: pip install pyarrow',
+         ) from e
+     return cast(JSONList, frame.to_dict(orient='records'))


- def write(path: Path, data: JSONData) -> int:
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
      """
-     Write ``data`` to ORC at ``path``.
+     Write ``data`` to ORC at ``path`` and return record count.

      Parameters
      ----------
@@ -53,7 +122,21 @@ def write(path: Path, data: JSONData) -> int:

      Raises
      ------
-     NotImplementedError
-         ORC :func:`write` is not implemented yet.
+     ImportError
+         When optional dependency "pyarrow" is missing.
      """
-     raise NotImplementedError('ORC write is not implemented yet')
+     records = _normalize_records(data)
+     if not records:
+         return 0
+
+     pandas = _get_pandas()
+     path.parent.mkdir(parents=True, exist_ok=True)
+     frame = pandas.DataFrame.from_records(records)
+     try:
+         frame.to_orc(path, index=False)
+     except ImportError as e:  # pragma: no cover
+         raise ImportError(
+             'ORC support requires optional dependency "pyarrow".\n'
+             'Install with: pip install pyarrow',
+         ) from e
+     return len(records)