etlplus-0.12.9-py3-none-any.whl → etlplus-0.12.11-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- etlplus/file/accdb.py +78 -0
- etlplus/file/arrow.py +78 -0
- etlplus/file/avro.py +13 -1
- etlplus/file/bson.py +77 -0
- etlplus/file/cbor.py +78 -0
- etlplus/file/cfg.py +79 -0
- etlplus/file/conf.py +80 -0
- etlplus/file/csv.py +13 -1
- etlplus/file/dat.py +13 -1
- etlplus/file/dta.py +77 -0
- etlplus/file/duckdb.py +78 -0
- etlplus/file/enums.py +10 -4
- etlplus/file/feather.py +13 -1
- etlplus/file/fwf.py +12 -1
- etlplus/file/hbs.py +78 -0
- etlplus/file/hdf5.py +78 -0
- etlplus/file/ini.py +79 -0
- etlplus/file/ion.py +78 -0
- etlplus/file/jinja2.py +78 -0
- etlplus/file/json.py +13 -1
- etlplus/file/log.py +78 -0
- etlplus/file/mat.py +78 -0
- etlplus/file/mdb.py +78 -0
- etlplus/file/msgpack.py +78 -0
- etlplus/file/mustache.py +78 -0
- etlplus/file/nc.py +78 -0
- etlplus/file/ndjson.py +12 -6
- etlplus/file/numbers.py +75 -0
- etlplus/file/ods.py +79 -0
- etlplus/file/orc.py +13 -1
- etlplus/file/parquet.py +13 -1
- etlplus/file/pb.py +78 -0
- etlplus/file/pbf.py +77 -0
- etlplus/file/properties.py +78 -0
- etlplus/file/proto.py +77 -0
- etlplus/file/psv.py +14 -1
- etlplus/file/rda.py +78 -0
- etlplus/file/rds.py +78 -0
- etlplus/file/sas7bdat.py +78 -0
- etlplus/file/sav.py +77 -0
- etlplus/file/sqlite.py +78 -0
- etlplus/file/sylk.py +77 -0
- etlplus/file/tab.py +3 -4
- etlplus/file/toml.py +78 -0
- etlplus/file/tsv.py +14 -1
- etlplus/file/txt.py +11 -1
- etlplus/file/vm.py +78 -0
- etlplus/file/wks.py +77 -0
- etlplus/file/xlsm.py +79 -0
- etlplus/file/xml.py +12 -1
- etlplus/file/xpt.py +78 -0
- etlplus/file/yaml.py +12 -1
- etlplus/file/zsav.py +77 -0
- {etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/METADATA +31 -1
- {etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/RECORD +59 -22
- {etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/WHEEL +0 -0
- {etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/entry_points.txt +0 -0
- {etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/licenses/LICENSE +0 -0
- {etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/top_level.txt +0 -0
etlplus/file/toml.py
ADDED
@@ -0,0 +1,78 @@
+"""
+:mod:`etlplus.file.toml` module.
+
+Helpers for reading/writing Tom's Obvious Minimal Language (TOML) files.
+
+Notes
+-----
+- A TOML file is a configuration file that uses the TOML syntax.
+- Common cases:
+    - Simple key-value pairs.
+    - Nested tables and arrays.
+    - Data types such as strings, integers, floats, booleans, dates, and
+      arrays.
+- Rule of thumb:
+    - If the file follows the TOML specification, use this module for
+      reading and writing.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..types import JSONData
+from ..types import JSONList
+from . import stub
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    'read',
+    'write',
+]
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONList:
+    """
+    Read TOML content from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the TOML file on disk.
+
+    Returns
+    -------
+    JSONList
+        The list of dictionaries read from the TOML file.
+    """
+    return stub.read(path, format_name='TOML')
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` to TOML at ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the TOML file on disk.
+    data : JSONData
+        Data to write as TOML. Should be a list of dictionaries or a
+        single dictionary.
+
+    Returns
+    -------
+    int
+        The number of rows written to the TOML file.
+    """
+    return stub.write(path, data, format_name='TOML')

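For orientation, the sketch below shows how the new stub-backed modules are meant to be called from user code; it mirrors the read/write signatures in the diff above. The file name and records are made up, and because the README marks `toml` as not yet supported ("N"), the runtime behaviour depends on `etlplus.file.stub`, whose implementation is not part of this diff.

    # Hypothetical usage sketch; assumes only what the diff above shows.
    from pathlib import Path

    from etlplus.file import toml as toml_file  # module added in this release

    records = [{'name': 'alpha', 'count': 1}]   # made-up sample data
    written = toml_file.write(Path('example.toml'), records)  # delegates to stub.write(..., format_name='TOML')
    loaded = toml_file.read(Path('example.toml'))             # delegates to stub.read(..., format_name='TOML')
    print(written, loaded)
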
etlplus/file/tsv.py
CHANGED
@@ -1,7 +1,20 @@
 """
 :mod:`etlplus.file.tsv` module.
 
-Helpers for reading/writing TSV files.
+Helpers for reading/writing Tab-Separated Values (TSV) files.
+
+Notes
+-----
+- A TSV file is a plain text file that uses the tab character (``\t``) to
+  separate values.
+- Common cases:
+    - Each line in the file represents a single record.
+    - The first line often contains headers that define the column names.
+    - Values may be enclosed in quotes, especially if they contain tabs
+      or special characters.
+- Rule of thumb:
+    - If the file follows the TSV specification, use this module for
+      reading and writing.
 """
 
 from __future__ import annotations

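As a side note, here is a minimal round-trip sketch for the TSV helpers whose docstring is expanded above, assuming `etlplus.file.tsv` keeps the shared `read(path)` / `write(path, data)` surface used across `etlplus.file` (its function bodies are not shown in this diff); the file name and rows are illustrative only.

    # Illustrative only; signatures assumed from the surrounding modules.
    from pathlib import Path

    from etlplus.file import tsv

    rows = [
        {'id': 1, 'city': 'Oslo'},    # made-up records
        {'id': 2, 'city': 'Lisbon'},
    ]
    count = tsv.write(Path('cities.tsv'), rows)  # expected to return the number of rows written
    back = tsv.read(Path('cities.tsv'))          # expected: list of dicts keyed by the header row
    print(count, back)
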
etlplus/file/txt.py
CHANGED
@@ -1,7 +1,17 @@
 """
 :mod:`etlplus.file.txt` module.
 
-Helpers for reading/writing text files.
+Helpers for reading/writing text (TXT) files.
+
+Notes
+-----
+- A TXT file is a plain text file that contains unformatted text.
+- Common cases:
+    - Each line in the file represents a single piece of text.
+    - Lines may vary in length and content.
+- Rule of thumb:
+    - If the file is a simple text file without specific formatting
+      requirements, use this module for reading and writing.
 """
 
 from __future__ import annotations

etlplus/file/vm.py
ADDED
@@ -0,0 +1,78 @@
+"""
+:mod:`etlplus.file.vm` module.
+
+Helpers for reading/writing Apache Velocity (VM) template files.
+
+Notes
+-----
+- A VM file is a text file used for generating HTML or other text formats
+  by combining templates with data.
+- Common cases:
+    - HTML templates.
+    - Email templates.
+    - Configuration files.
+- Rule of thumb:
+    - If you need to work with Apache Velocity template files, use this module
+      for reading and writing.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..types import JSONData
+from ..types import JSONList
+from . import stub
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    'read',
+    'write',
+]
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONList:
+    """
+    Read VM content from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the VM file on disk.
+
+    Returns
+    -------
+    JSONList
+        The list of dictionaries read from the VM file.
+    """
+    return stub.read(path, format_name='VM')
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` to VM file at ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the VM file on disk.
+    data : JSONData
+        Data to write as VM file. Should be a list of dictionaries or a single
+        dictionary.
+
+    Returns
+    -------
+    int
+        The number of rows written to the VM file.
+    """
+    return stub.write(path, data, format_name='VM')

etlplus/file/wks.py
ADDED
@@ -0,0 +1,77 @@
+"""
+:mod:`etlplus.file.wks` module.
+
+Helpers for reading/writing Lotus 1-2-3 (WKS) spreadsheet files.
+
+Notes
+-----
+- A WKS file is a spreadsheet file created using the Lotus 1-2-3 format.
+- Common cases:
+    - Reading data from legacy Lotus 1-2-3 spreadsheets.
+    - Writing data to Lotus 1-2-3 format for compatibility.
+    - Converting WKS files to more modern formats.
+- Rule of thumb:
+    - If you need to work with Lotus 1-2-3 spreadsheet files, use this module
+      for reading and writing.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..types import JSONData
+from ..types import JSONList
+from . import stub
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    'read',
+    'write',
+]
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONList:
+    """
+    Read WKS content from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the WKS file on disk.
+
+    Returns
+    -------
+    JSONList
+        The list of dictionaries read from the WKS file.
+    """
+    return stub.read(path, format_name='WKS')
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` to WKS file at ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the WKS file on disk.
+    data : JSONData
+        Data to write as WKS file. Should be a list of dictionaries or a
+        single dictionary.
+
+    Returns
+    -------
+    int
+        The number of rows written to the WKS file.
+    """
+    return stub.write(path, data, format_name='WKS')

etlplus/file/xlsm.py
ADDED
@@ -0,0 +1,79 @@
+"""
+:mod:`etlplus.file.xlsm` module.
+
+Helpers for reading/writing Microsoft Excel Macro-Enabled (XLSM) spreadsheet
+files.
+
+Notes
+-----
+- An XLSM file is a spreadsheet file created using the Microsoft Excel Macro-
+  Enabled (Open XML) format.
+- Common cases:
+    - Reading data from Excel Macro-Enabled spreadsheets.
+    - Writing data to Excel Macro-Enabled format for compatibility.
+    - Converting XLSM files to more modern formats.
+- Rule of thumb:
+    - If you need to work with Excel Macro-Enabled spreadsheet files, use this
+      module for reading and writing.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..types import JSONData
+from ..types import JSONList
+from . import stub
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    'read',
+    'write',
+]
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONList:
+    """
+    Read XLSM content from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the XLSM file on disk.
+
+    Returns
+    -------
+    JSONList
+        The list of dictionaries read from the XLSM file.
+    """
+    return stub.read(path, format_name='XLSM')
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` to XLSM file at ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the XLSM file on disk.
+    data : JSONData
+        Data to write as XLSM file. Should be a list of dictionaries or a
+        single dictionary.
+
+    Returns
+    -------
+    int
+        The number of rows written to the XLSM file.
+    """
+    return stub.write(path, data, format_name='XLSM')

etlplus/file/xml.py
CHANGED
@@ -1,7 +1,18 @@
 """
 :mod:`etlplus.file.xml` module.
 
-Helpers for reading/writing XML files.
+Helpers for reading/writing Extensible Markup Language (XML) files.
+
+Notes
+-----
+- An XML file is a markup language file that uses tags to define elements.
+- Common cases:
+    - Configuration files.
+    - Data interchange between systems.
+    - Document formatting.
+- Rule of thumb:
+    - If the file follows the XML specification, use this module for
+      reading and writing.
 """
 
 from __future__ import annotations

etlplus/file/xpt.py
ADDED
@@ -0,0 +1,78 @@
+"""
+:mod:`etlplus.file.xpt` module.
+
+Helpers for reading/writing SAS Transport (XPT) files.
+
+Notes
+-----
+- A SAS Transport (XPT) file is a standardized file format used to transfer
+  SAS datasets between different systems.
+- Common cases:
+    - Sharing datasets between different SAS installations.
+    - Archiving datasets in a platform-independent format.
+    - Importing/exporting data to/from statistical software that supports XPT.
+- Rule of thumb:
+    - If you need to work with XPT files, use this module for reading
+      and writing.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..types import JSONData
+from ..types import JSONList
+from . import stub
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    'read',
+    'write',
+]
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONList:
+    """
+    Read XPT content from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the XPT file on disk.
+
+    Returns
+    -------
+    JSONList
+        The list of dictionaries read from the XPT file.
+    """
+    return stub.read(path, format_name='XPT')
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` to XPT file at ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the XPT file on disk.
+    data : JSONData
+        Data to write as XPT file. Should be a list of dictionaries or a
+        single dictionary.
+
+    Returns
+    -------
+    int
+        The number of rows written to the XPT file.
+    """
+    return stub.write(path, data, format_name='XPT')

etlplus/file/yaml.py
CHANGED
@@ -1,7 +1,18 @@
 """
 :mod:`etlplus.file.yaml` module.
 
-Helpers for reading/writing YAML files.
+Helpers for reading/writing YAML Ain't Markup Language (YAML) files.
+
+Notes
+-----
+- A YAML file is a human-readable data serialization format.
+- Common cases:
+    - Configuration files.
+    - Data exchange between languages with different data structures.
+    - Complex data storage.
+- Rule of thumb:
+    - If the file follows the YAML specification, use this module for
+      reading and writing.
 """
 
 from __future__ import annotations

etlplus/file/zsav.py
ADDED
@@ -0,0 +1,77 @@
+"""
+:mod:`etlplus.file.zsav` module.
+
+Helpers for reading/writing compressed SPSS (ZSAV) data files.
+
+Notes
+-----
+- A ZSAV file is a compressed binary file format used by SPSS to store
+  datasets, including variables, labels, and data types.
+- Common cases:
+    - Reading compressed data for analysis in Python.
+    - Writing processed data back to compressed SPSS format.
+- Rule of thumb:
+    - If you need to work with compressed SPSS data files, use this module for
+      reading and writing.
+"""
+
+from __future__ import annotations
+
+from pathlib import Path
+
+from ..types import JSONData
+from ..types import JSONList
+from . import stub
+
+# SECTION: EXPORTS ========================================================== #
+
+
+__all__ = [
+    'read',
+    'write',
+]
+
+
+# SECTION: FUNCTIONS ======================================================== #
+
+
+def read(
+    path: Path,
+) -> JSONList:
+    """
+    Read ZSAV content from ``path``.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the ZSAV file on disk.
+
+    Returns
+    -------
+    JSONList
+        The list of dictionaries read from the ZSAV file.
+    """
+    return stub.read(path, format_name='ZSAV')
+
+
+def write(
+    path: Path,
+    data: JSONData,
+) -> int:
+    """
+    Write ``data`` to ZSAV file at ``path`` and return record count.
+
+    Parameters
+    ----------
+    path : Path
+        Path to the ZSAV file on disk.
+    data : JSONData
+        Data to write as ZSAV file. Should be a list of dictionaries or a
+        single dictionary.
+
+    Returns
+    -------
+    int
+        The number of rows written to the ZSAV file.
+    """
+    return stub.write(path, data, format_name='ZSAV')

{etlplus-0.12.9.dist-info → etlplus-0.12.11.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: etlplus
-Version: 0.12.9
+Version: 0.12.11
 Summary: A Swiss Army knife for simple ETL operations
 Home-page: https://github.com/Dagitali/ETLPlus
 Author: ETLPlus Team
@@ -77,8 +77,10 @@ package and command-line interface for data extraction, validation, transformati
 - [Semi-Structured Text](#semi-structured-text)
 - [Columnar / Analytics-Friendly](#columnar--analytics-friendly)
 - [Binary Serialization and Interchange](#binary-serialization-and-interchange)
+- [Databases and Embedded Storage](#databases-and-embedded-storage)
 - [Spreadsheets](#spreadsheets)
 - [Data Archives](#data-archives)
+- [Logs and Event Streams](#logs-and-event-streams)
 - [Usage](#usage)
 - [Command Line Interface](#command-line-interface)
 - [Argument Order and Required Options](#argument-order-and-required-options)
@@ -246,8 +248,13 @@ File formats are grouped as in `FileFormat`. Support is marked as:
 
 | Format | Supported | Description |
 | --- | --- | --- |
+| `cfg` | N | Config-style key-value pairs |
+| `conf` | N | Config-style key-value pairs |
+| `ini` | N | Config-style key-value pairs |
 | `json` | Y | JavaScript Object Notation |
 | `ndjson` | Y | Newline-Delimited JSON |
+| `properties` | N | Java-style key-value pairs |
+| `toml` | N | Tom's Obvious Minimal Language |
 | `xml` | Y | Extensible Markup Language |
 | `yaml` | Y | YAML Ain't Markup Language |
 
@@ -255,6 +262,7 @@ File formats are grouped as in `FileFormat`. Support is marked as:
 
 | Format | Supported | Description |
 | --- | --- | --- |
+| `arrow` | N | Apache Arrow IPC |
 | `feather` | Y | Apache Arrow Feather |
 | `orc` | Y | Optimized Row Columnar; common in Hadoop |
 | `parquet` | Y | Apache Parquet; common in Big Data |
@@ -264,6 +272,22 @@ File formats are grouped as in `FileFormat`. Support is marked as:
 | Format | Supported | Description |
 | --- | --- | --- |
 | `avro` | Y | Apache Avro |
+| `bson` | N | Binary JSON; common with MongoDB exports/dumps |
+| `cbor` | N | Concise Binary Object Representation |
+| `ion` | N | Amazon Ion |
+| `msgpack` | N | MessagePack |
+| `pb` | N | Protocol Buffers (Google Protobuf) |
+| `pbf` | N | Protocolbuffer Binary Format; often for GIS data |
+| `proto` | N | Protocol Buffers schema; often in .pb / .bin |
+
+#### Databases and Embedded Storage
+
+| Format | Supported | Description |
+| --- | --- | --- |
+| `accdb` | N | Microsoft Access database file (newer format) |
+| `duckdb` | N | DuckDB database file |
+| `mdb` | N | Microsoft Access database file (older format) |
+| `sqlite` | N | SQLite database file |
 
 #### Spreadsheets
 
@@ -279,6 +303,12 @@ File formats are grouped as in `FileFormat`. Support is marked as:
 | `gz` | Y | Gzip-compressed file |
 | `zip` | Y | ZIP archive |
 
+#### Logs and Event Streams
+
+| Format | Supported | Description |
+| --- | --- | --- |
+| `log` | N | Generic log file |
+
 ## Usage
 
 ### Command Line Interface