etlplus 0.12.3__py3-none-any.whl → 0.12.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- etlplus/file/_imports.py +141 -0
- etlplus/file/_io.py +1 -0
- etlplus/file/accdb.py +78 -0
- etlplus/file/arrow.py +78 -0
- etlplus/file/avro.py +17 -27
- etlplus/file/bson.py +77 -0
- etlplus/file/cbor.py +78 -0
- etlplus/file/cfg.py +79 -0
- etlplus/file/conf.py +80 -0
- etlplus/file/core.py +119 -84
- etlplus/file/csv.py +13 -1
- etlplus/file/dat.py +78 -0
- etlplus/file/duckdb.py +78 -0
- etlplus/file/enums.py +114 -15
- etlplus/file/feather.py +14 -2
- etlplus/file/fwf.py +77 -0
- etlplus/file/ini.py +79 -0
- etlplus/file/ion.py +78 -0
- etlplus/file/json.py +13 -1
- etlplus/file/log.py +78 -0
- etlplus/file/mdb.py +78 -0
- etlplus/file/msgpack.py +78 -0
- etlplus/file/ndjson.py +14 -15
- etlplus/file/orc.py +14 -2
- etlplus/file/parquet.py +14 -2
- etlplus/file/pb.py +78 -0
- etlplus/file/pbf.py +77 -0
- etlplus/file/properties.py +78 -0
- etlplus/file/proto.py +77 -0
- etlplus/file/psv.py +79 -0
- etlplus/file/sqlite.py +78 -0
- etlplus/file/stub.py +84 -0
- etlplus/file/tab.py +81 -0
- etlplus/file/toml.py +78 -0
- etlplus/file/tsv.py +14 -1
- etlplus/file/txt.py +13 -10
- etlplus/file/xls.py +1 -1
- etlplus/file/xlsx.py +1 -1
- etlplus/file/xml.py +12 -1
- etlplus/file/yaml.py +15 -44
- {etlplus-0.12.3.dist-info → etlplus-0.12.10.dist-info}/METADATA +119 -1
- {etlplus-0.12.3.dist-info → etlplus-0.12.10.dist-info}/RECORD +46 -23
- etlplus/file/_pandas.py +0 -58
- {etlplus-0.12.3.dist-info → etlplus-0.12.10.dist-info}/WHEEL +0 -0
- {etlplus-0.12.3.dist-info → etlplus-0.12.10.dist-info}/entry_points.txt +0 -0
- {etlplus-0.12.3.dist-info → etlplus-0.12.10.dist-info}/licenses/LICENSE +0 -0
- {etlplus-0.12.3.dist-info → etlplus-0.12.10.dist-info}/top_level.txt +0 -0
etlplus/file/ini.py
ADDED
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.ini` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing initialization (INI) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- An INI file is a simple configuration file format that uses sections,
|
|
9
|
+
properties, and values.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Sections are denoted by square brackets (e.g., ``[section]``).
|
|
12
|
+
- Properties are key-value pairs (e.g., ``key=value``).
|
|
13
|
+
- Comments are often indicated by semicolons (``;``) or hash symbols
|
|
14
|
+
(``#``).
|
|
15
|
+
- Rule of thumb:
|
|
16
|
+
- If the file follows the INI specification, use this module for
|
|
17
|
+
reading and writing.
|
|
18
|
+
"""
|
|
19
|
+
|
|
20
|
+
from __future__ import annotations
|
|
21
|
+
|
|
22
|
+
from pathlib import Path
|
|
23
|
+
|
|
24
|
+
from ..types import JSONData
|
|
25
|
+
from ..types import JSONList
|
|
26
|
+
from . import stub
|
|
27
|
+
|
|
28
|
+
# SECTION: EXPORTS ========================================================== #
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
__all__ = [
|
|
32
|
+
'read',
|
|
33
|
+
'write',
|
|
34
|
+
]
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def read(
|
|
41
|
+
path: Path,
|
|
42
|
+
) -> JSONList:
|
|
43
|
+
"""
|
|
44
|
+
Read INI content from ``path``.
|
|
45
|
+
|
|
46
|
+
Parameters
|
|
47
|
+
----------
|
|
48
|
+
path : Path
|
|
49
|
+
Path to the INI file on disk.
|
|
50
|
+
|
|
51
|
+
Returns
|
|
52
|
+
-------
|
|
53
|
+
JSONList
|
|
54
|
+
The list of dictionaries read from the INI file.
|
|
55
|
+
"""
|
|
56
|
+
return stub.read(path, format_name='INI')
|
|
57
|
+
|
|
58
|
+
|
|
59
|
+
def write(
|
|
60
|
+
path: Path,
|
|
61
|
+
data: JSONData,
|
|
62
|
+
) -> int:
|
|
63
|
+
"""
|
|
64
|
+
Write ``data`` to INI at ``path`` and return record count.
|
|
65
|
+
|
|
66
|
+
Parameters
|
|
67
|
+
----------
|
|
68
|
+
path : Path
|
|
69
|
+
Path to the INI file on disk.
|
|
70
|
+
data : JSONData
|
|
71
|
+
Data to write as INI. Should be a list of dictionaries or a
|
|
72
|
+
single dictionary.
|
|
73
|
+
|
|
74
|
+
Returns
|
|
75
|
+
-------
|
|
76
|
+
int
|
|
77
|
+
The number of rows written to the INI file.
|
|
78
|
+
"""
|
|
79
|
+
return stub.write(path, data, format_name='INI')
|
etlplus/file/ion.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.ion` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing Amazon Ion (ION) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- An ION file is a richly-typed, self-describing data format developed by
|
|
9
|
+
Amazon, designed for efficient data interchange and storage.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Data serialization for distributed systems.
|
|
12
|
+
- Interoperability between different programming languages.
|
|
13
|
+
- Handling of complex data types beyond standard JSON capabilities.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the Amazon Ion specification, use this module for
|
|
16
|
+
reading and writing.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
|
|
23
|
+
from ..types import JSONData
|
|
24
|
+
from ..types import JSONList
|
|
25
|
+
from . import stub
|
|
26
|
+
|
|
27
|
+
# SECTION: EXPORTS ========================================================== #
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
__all__ = [
|
|
31
|
+
'read',
|
|
32
|
+
'write',
|
|
33
|
+
]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def read(
|
|
40
|
+
path: Path,
|
|
41
|
+
) -> JSONList:
|
|
42
|
+
"""
|
|
43
|
+
Read ION content from ``path``.
|
|
44
|
+
|
|
45
|
+
Parameters
|
|
46
|
+
----------
|
|
47
|
+
path : Path
|
|
48
|
+
Path to the ION file on disk.
|
|
49
|
+
|
|
50
|
+
Returns
|
|
51
|
+
-------
|
|
52
|
+
JSONList
|
|
53
|
+
The list of dictionaries read from the ION file.
|
|
54
|
+
"""
|
|
55
|
+
return stub.read(path, format_name='ION')
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def write(
|
|
59
|
+
path: Path,
|
|
60
|
+
data: JSONData,
|
|
61
|
+
) -> int:
|
|
62
|
+
"""
|
|
63
|
+
Write ``data`` to ION at ``path`` and return record count.
|
|
64
|
+
|
|
65
|
+
Parameters
|
|
66
|
+
----------
|
|
67
|
+
path : Path
|
|
68
|
+
Path to the ION file on disk.
|
|
69
|
+
data : JSONData
|
|
70
|
+
Data to write as ION. Should be a list of dictionaries or a
|
|
71
|
+
single dictionary.
|
|
72
|
+
|
|
73
|
+
Returns
|
|
74
|
+
-------
|
|
75
|
+
int
|
|
76
|
+
The number of rows written to the ION file.
|
|
77
|
+
"""
|
|
78
|
+
return stub.write(path, data, format_name='ION')
|
etlplus/file/json.py
CHANGED
|
@@ -1,7 +1,19 @@
|
|
|
1
1
|
"""
|
|
2
2
|
:mod:`etlplus.file.json` module.
|
|
3
3
|
|
|
4
|
-
Helpers for reading/writing JSON files.
|
|
4
|
+
Helpers for reading/writing JavaScript Object Notation (JSON) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- A JSON file is a widely used data interchange format that uses
|
|
9
|
+
human-readable text to represent structured data.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Data interchange between web applications and servers.
|
|
12
|
+
- Configuration files for applications.
|
|
13
|
+
- Data storage for NoSQL databases.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the JSON specification, use this module for
|
|
16
|
+
reading and writing.
|
|
5
17
|
"""
|
|
6
18
|
|
|
7
19
|
from __future__ import annotations
|
etlplus/file/log.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.log` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing generic log (LOG) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- A LOG file is a plain text file that contains log messages generated by
|
|
9
|
+
applications or systems.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Each line in the file represents a single log entry.
|
|
12
|
+
- Log entries may include timestamps, log levels, and messages.
|
|
13
|
+
- Formats may vary widely depending on the application generating the logs.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file is a generic log file, use this module for reading and
|
|
16
|
+
writing.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
|
|
23
|
+
from ..types import JSONData
|
|
24
|
+
from ..types import JSONList
|
|
25
|
+
from . import stub
|
|
26
|
+
|
|
27
|
+
# SECTION: EXPORTS ========================================================== #
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
__all__ = [
|
|
31
|
+
'read',
|
|
32
|
+
'write',
|
|
33
|
+
]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def read(
|
|
40
|
+
path: Path,
|
|
41
|
+
) -> JSONList:
|
|
42
|
+
"""
|
|
43
|
+
Read LOG content from ``path``.
|
|
44
|
+
|
|
45
|
+
Parameters
|
|
46
|
+
----------
|
|
47
|
+
path : Path
|
|
48
|
+
Path to the LOG file on disk.
|
|
49
|
+
|
|
50
|
+
Returns
|
|
51
|
+
-------
|
|
52
|
+
JSONList
|
|
53
|
+
The list of dictionaries read from the LOG file.
|
|
54
|
+
"""
|
|
55
|
+
return stub.read(path, format_name='LOG')
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def write(
|
|
59
|
+
path: Path,
|
|
60
|
+
data: JSONData,
|
|
61
|
+
) -> int:
|
|
62
|
+
"""
|
|
63
|
+
Write ``data`` to LOG at ``path`` and return record count.
|
|
64
|
+
|
|
65
|
+
Parameters
|
|
66
|
+
----------
|
|
67
|
+
path : Path
|
|
68
|
+
Path to the LOG file on disk.
|
|
69
|
+
data : JSONData
|
|
70
|
+
Data to write as LOG. Should be a list of dictionaries or a
|
|
71
|
+
single dictionary.
|
|
72
|
+
|
|
73
|
+
Returns
|
|
74
|
+
-------
|
|
75
|
+
int
|
|
76
|
+
The number of rows written to the LOG file.
|
|
77
|
+
"""
|
|
78
|
+
return stub.write(path, data, format_name='LOG')
|
etlplus/file/mdb.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.mdb` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing legacy Microsoft Access database (MDB) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- An MDB file is a proprietary database file format used by Microsoft Access
|
|
9
|
+
2003 and earlier.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Storing relational data for small to medium-sized applications.
|
|
12
|
+
- Desktop database applications.
|
|
13
|
+
- Data management for non-enterprise solutions.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the MDB specification, use this module for reading
|
|
16
|
+
and writing.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
|
|
23
|
+
from ..types import JSONData
|
|
24
|
+
from ..types import JSONList
|
|
25
|
+
from . import stub
|
|
26
|
+
|
|
27
|
+
# SECTION: EXPORTS ========================================================== #
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
__all__ = [
|
|
31
|
+
'read',
|
|
32
|
+
'write',
|
|
33
|
+
]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def read(
    path: Path,
) -> JSONList:
    """
    Read MDB content from ``path``.

    Parameters
    ----------
    path : Path
        Path to the MDB file on disk.

    Returns
    -------
    JSONList
        The list of dictionaries read from the MDB file.
    """
    # Delegate to the shared stub with this module's own format name.
    # (Previously mislabeled 'DAT' / documented as CSV — a copy-paste slip.)
    return stub.read(path, format_name='MDB')
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to MDB at ``path`` and return record count.

    Parameters
    ----------
    path : Path
        Path to the MDB file on disk.
    data : JSONData
        Data to write as MDB. Should be a list of dictionaries or a
        single dictionary.

    Returns
    -------
    int
        The number of rows written to the MDB file.
    """
    # Delegate to the shared stub with this module's own format name.
    # (Previously mislabeled 'DAT' / documented as CSV — a copy-paste slip.)
    return stub.write(path, data, format_name='MDB')
|
etlplus/file/msgpack.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.msgpack` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing MessagePack (MSGPACK) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- A MsgPack file is a binary serialization format that is more compact than
|
|
9
|
+
JSON.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Efficient data storage and transmission.
|
|
12
|
+
- Inter-process communication.
|
|
13
|
+
- Data serialization in performance-critical applications.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the MsgPack specification, use this module for
|
|
16
|
+
reading and writing.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
|
|
23
|
+
from ..types import JSONData
|
|
24
|
+
from ..types import JSONList
|
|
25
|
+
from . import stub
|
|
26
|
+
|
|
27
|
+
# SECTION: EXPORTS ========================================================== #
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
__all__ = [
|
|
31
|
+
'read',
|
|
32
|
+
'write',
|
|
33
|
+
]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def read(
|
|
40
|
+
path: Path,
|
|
41
|
+
) -> JSONList:
|
|
42
|
+
"""
|
|
43
|
+
Read MsgPack content from ``path``.
|
|
44
|
+
|
|
45
|
+
Parameters
|
|
46
|
+
----------
|
|
47
|
+
path : Path
|
|
48
|
+
Path to the MsgPack file on disk.
|
|
49
|
+
|
|
50
|
+
Returns
|
|
51
|
+
-------
|
|
52
|
+
JSONList
|
|
53
|
+
The list of dictionaries read from the MsgPack file.
|
|
54
|
+
"""
|
|
55
|
+
return stub.read(path, format_name='MSGPACK')
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def write(
|
|
59
|
+
path: Path,
|
|
60
|
+
data: JSONData,
|
|
61
|
+
) -> int:
|
|
62
|
+
"""
|
|
63
|
+
Write ``data`` to MsgPack at ``path`` and return record count.
|
|
64
|
+
|
|
65
|
+
Parameters
|
|
66
|
+
----------
|
|
67
|
+
path : Path
|
|
68
|
+
Path to the MsgPack file on disk.
|
|
69
|
+
data : JSONData
|
|
70
|
+
Data to write as MsgPack. Should be a list of dictionaries or a
|
|
71
|
+
single dictionary.
|
|
72
|
+
|
|
73
|
+
Returns
|
|
74
|
+
-------
|
|
75
|
+
int
|
|
76
|
+
The number of rows written to the MsgPack file.
|
|
77
|
+
"""
|
|
78
|
+
return stub.write(path, data, format_name='MSGPACK')
|
etlplus/file/ndjson.py
CHANGED
|
@@ -1,7 +1,18 @@
|
|
|
1
1
|
"""
|
|
2
2
|
:mod:`etlplus.file.ndjson` module.
|
|
3
3
|
|
|
4
|
-
Helpers for reading/writing NDJSON files.
|
|
4
|
+
Helpers for reading/writing Newline Delimited JSON (NDJSON) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- An NDJSON file is a format where each line is a separate JSON object.
|
|
9
|
+
- Common cases:
|
|
10
|
+
- Streaming JSON data.
|
|
11
|
+
- Log files with JSON entries.
|
|
12
|
+
- Large datasets that are processed line-by-line.
|
|
13
|
+
- Rule of thumb:
|
|
14
|
+
- If the file follows the NDJSON specification, use this module for
|
|
15
|
+
reading and writing.
|
|
5
16
|
"""
|
|
6
17
|
|
|
7
18
|
from __future__ import annotations
|
|
@@ -14,6 +25,7 @@ from ..types import JSONData
|
|
|
14
25
|
from ..types import JSONDict
|
|
15
26
|
from ..types import JSONList
|
|
16
27
|
from ..utils import count_records
|
|
28
|
+
from ._io import normalize_records
|
|
17
29
|
|
|
18
30
|
# SECTION: EXPORTS ========================================================== #
|
|
19
31
|
|
|
@@ -81,21 +93,8 @@ def write(
|
|
|
81
93
|
-------
|
|
82
94
|
int
|
|
83
95
|
Number of records written.
|
|
84
|
-
|
|
85
|
-
Raises
|
|
86
|
-
------
|
|
87
|
-
TypeError
|
|
88
|
-
If ``data`` is a list containing non-dict items.
|
|
89
96
|
"""
|
|
90
|
-
rows
|
|
91
|
-
if isinstance(data, list):
|
|
92
|
-
if not all(isinstance(item, dict) for item in data):
|
|
93
|
-
raise TypeError(
|
|
94
|
-
'NDJSON payloads must contain only objects (dicts)',
|
|
95
|
-
)
|
|
96
|
-
rows = cast(JSONList, data)
|
|
97
|
-
else:
|
|
98
|
-
rows = [cast(JSONDict, data)]
|
|
97
|
+
rows = normalize_records(data, 'NDJSON')
|
|
99
98
|
|
|
100
99
|
if not rows:
|
|
101
100
|
return 0
|
etlplus/file/orc.py
CHANGED
|
@@ -1,7 +1,19 @@
|
|
|
1
1
|
"""
|
|
2
2
|
:mod:`etlplus.file.orc` module.
|
|
3
3
|
|
|
4
|
-
Helpers for reading/writing ORC files.
|
|
4
|
+
Helpers for reading/writing Optimized Row Columnar (ORC) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- An ORC file is a columnar storage file format optimized for Big Data
|
|
9
|
+
processing.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Efficient storage and retrieval of large datasets.
|
|
12
|
+
- Integration with big data frameworks like Apache Hive and Apache Spark.
|
|
13
|
+
- Compression and performance optimization for analytical queries.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the ORC specification, use this module for reading
|
|
16
|
+
and writing.
|
|
5
17
|
"""
|
|
6
18
|
|
|
7
19
|
from __future__ import annotations
|
|
@@ -11,8 +23,8 @@ from typing import cast
|
|
|
11
23
|
|
|
12
24
|
from ..types import JSONData
|
|
13
25
|
from ..types import JSONList
|
|
26
|
+
from ._imports import get_pandas
|
|
14
27
|
from ._io import normalize_records
|
|
15
|
-
from ._pandas import get_pandas
|
|
16
28
|
|
|
17
29
|
# SECTION: EXPORTS ========================================================== #
|
|
18
30
|
|
etlplus/file/parquet.py
CHANGED
|
@@ -1,7 +1,19 @@
|
|
|
1
1
|
"""
|
|
2
2
|
:mod:`etlplus.file.parquet` module.
|
|
3
3
|
|
|
4
|
-
Helpers for reading/writing Parquet files.
|
|
4
|
+
Helpers for reading/writing Apache Parquet (PARQUET) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- An Apache Parquet file is a columnar storage file format optimized for Big
|
|
9
|
+
Data processing.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Efficient storage and retrieval of large datasets.
|
|
12
|
+
- Integration with big data frameworks like Apache Hive and Apache Spark.
|
|
13
|
+
- Compression and performance optimization for analytical queries.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the Apache Parquet specification, use this module for
|
|
16
|
+
reading and writing.
|
|
5
17
|
"""
|
|
6
18
|
|
|
7
19
|
from __future__ import annotations
|
|
@@ -11,8 +23,8 @@ from typing import cast
|
|
|
11
23
|
|
|
12
24
|
from ..types import JSONData
|
|
13
25
|
from ..types import JSONList
|
|
26
|
+
from ._imports import get_pandas
|
|
14
27
|
from ._io import normalize_records
|
|
15
|
-
from ._pandas import get_pandas
|
|
16
28
|
|
|
17
29
|
# SECTION: EXPORTS ========================================================== #
|
|
18
30
|
|
etlplus/file/pb.py
ADDED
|
@@ -0,0 +1,78 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.pb` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing Protocol Buffer (PB) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- PB (a.k.a. Protobuf) is a binary serialization format developed by Google
|
|
9
|
+
for structured data.
|
|
10
|
+
- Common cases:
|
|
11
|
+
- Data interchange between services.
|
|
12
|
+
- Efficient storage of structured data.
|
|
13
|
+
- Communication in distributed systems.
|
|
14
|
+
- Rule of thumb:
|
|
15
|
+
- If the file follows the Protocol Buffer specification, use this module
|
|
16
|
+
for reading and writing.
|
|
17
|
+
"""
|
|
18
|
+
|
|
19
|
+
from __future__ import annotations
|
|
20
|
+
|
|
21
|
+
from pathlib import Path
|
|
22
|
+
|
|
23
|
+
from ..types import JSONData
|
|
24
|
+
from ..types import JSONList
|
|
25
|
+
from . import stub
|
|
26
|
+
|
|
27
|
+
# SECTION: EXPORTS ========================================================== #
|
|
28
|
+
|
|
29
|
+
|
|
30
|
+
__all__ = [
|
|
31
|
+
'read',
|
|
32
|
+
'write',
|
|
33
|
+
]
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def read(
|
|
40
|
+
path: Path,
|
|
41
|
+
) -> JSONList:
|
|
42
|
+
"""
|
|
43
|
+
Read PB content from ``path``.
|
|
44
|
+
|
|
45
|
+
Parameters
|
|
46
|
+
----------
|
|
47
|
+
path : Path
|
|
48
|
+
Path to the PB file on disk.
|
|
49
|
+
|
|
50
|
+
Returns
|
|
51
|
+
-------
|
|
52
|
+
JSONList
|
|
53
|
+
The list of dictionaries read from the PB file.
|
|
54
|
+
"""
|
|
55
|
+
return stub.read(path, format_name='PB')
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def write(
|
|
59
|
+
path: Path,
|
|
60
|
+
data: JSONData,
|
|
61
|
+
) -> int:
|
|
62
|
+
"""
|
|
63
|
+
Write ``data`` to PB at ``path`` and return record count.
|
|
64
|
+
|
|
65
|
+
Parameters
|
|
66
|
+
----------
|
|
67
|
+
path : Path
|
|
68
|
+
Path to the PB file on disk.
|
|
69
|
+
data : JSONData
|
|
70
|
+
Data to write as PB. Should be a list of dictionaries or a
|
|
71
|
+
single dictionary.
|
|
72
|
+
|
|
73
|
+
Returns
|
|
74
|
+
-------
|
|
75
|
+
int
|
|
76
|
+
The number of rows written to the PB file.
|
|
77
|
+
"""
|
|
78
|
+
return stub.write(path, data, format_name='PB')
|
etlplus/file/pbf.py
ADDED
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
"""
|
|
2
|
+
:mod:`etlplus.file.pbf` module.
|
|
3
|
+
|
|
4
|
+
Helpers for reading/writing Protocolbuffer Binary Format (PBF) files.
|
|
5
|
+
|
|
6
|
+
Notes
|
|
7
|
+
-----
|
|
8
|
+
- PBF is a binary format used primarily for OpenStreetMap (OSM) data.
|
|
9
|
+
- Common cases:
|
|
10
|
+
- Efficient storage of large OSM datasets.
|
|
11
|
+
- Fast data interchange for mapping applications.
|
|
12
|
+
- Compression of OSM data for reduced file size.
|
|
13
|
+
- Rule of thumb:
|
|
14
|
+
- If the file follows the PBF specification, use this module for reading
|
|
15
|
+
and writing.
|
|
16
|
+
"""
|
|
17
|
+
|
|
18
|
+
from __future__ import annotations
|
|
19
|
+
|
|
20
|
+
from pathlib import Path
|
|
21
|
+
|
|
22
|
+
from ..types import JSONData
|
|
23
|
+
from ..types import JSONList
|
|
24
|
+
from . import stub
|
|
25
|
+
|
|
26
|
+
# SECTION: EXPORTS ========================================================== #
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
__all__ = [
|
|
30
|
+
'read',
|
|
31
|
+
'write',
|
|
32
|
+
]
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
# SECTION: FUNCTIONS ======================================================== #
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
def read(
|
|
39
|
+
path: Path,
|
|
40
|
+
) -> JSONList:
|
|
41
|
+
"""
|
|
42
|
+
Read PBF content from ``path``.
|
|
43
|
+
|
|
44
|
+
Parameters
|
|
45
|
+
----------
|
|
46
|
+
path : Path
|
|
47
|
+
Path to the PBF file on disk.
|
|
48
|
+
|
|
49
|
+
Returns
|
|
50
|
+
-------
|
|
51
|
+
JSONList
|
|
52
|
+
The list of dictionaries read from the PBF file.
|
|
53
|
+
"""
|
|
54
|
+
return stub.read(path, format_name='PBF')
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
def write(
|
|
58
|
+
path: Path,
|
|
59
|
+
data: JSONData,
|
|
60
|
+
) -> int:
|
|
61
|
+
"""
|
|
62
|
+
Write ``data`` to PBF at ``path`` and return record count.
|
|
63
|
+
|
|
64
|
+
Parameters
|
|
65
|
+
----------
|
|
66
|
+
path : Path
|
|
67
|
+
Path to the PBF file on disk.
|
|
68
|
+
data : JSONData
|
|
69
|
+
Data to write as PBF. Should be a list of dictionaries or a
|
|
70
|
+
single dictionary.
|
|
71
|
+
|
|
72
|
+
Returns
|
|
73
|
+
-------
|
|
74
|
+
int
|
|
75
|
+
The number of rows written to the PBF file.
|
|
76
|
+
"""
|
|
77
|
+
return stub.write(path, data, format_name='PBF')
|