etlplus 0.9.2__py3-none-any.whl → 0.10.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- etlplus/__init__.py +26 -1
- etlplus/api/README.md +3 -51
- etlplus/api/__init__.py +0 -10
- etlplus/api/config.py +28 -39
- etlplus/api/endpoint_client.py +3 -3
- etlplus/api/pagination/client.py +1 -1
- etlplus/api/rate_limiting/config.py +1 -13
- etlplus/api/rate_limiting/rate_limiter.py +11 -8
- etlplus/api/request_manager.py +6 -11
- etlplus/api/transport.py +2 -14
- etlplus/api/types.py +6 -96
- etlplus/cli/commands.py +43 -76
- etlplus/cli/constants.py +1 -1
- etlplus/cli/handlers.py +12 -40
- etlplus/cli/io.py +2 -2
- etlplus/cli/main.py +1 -1
- etlplus/cli/state.py +7 -4
- etlplus/{workflow → config}/__init__.py +23 -10
- etlplus/{workflow → config}/connector.py +44 -58
- etlplus/{workflow → config}/jobs.py +32 -105
- etlplus/{workflow → config}/pipeline.py +51 -59
- etlplus/{workflow → config}/profile.py +5 -8
- etlplus/config/types.py +204 -0
- etlplus/config/utils.py +120 -0
- etlplus/database/ddl.py +1 -1
- etlplus/database/engine.py +3 -19
- etlplus/database/orm.py +0 -2
- etlplus/database/schema.py +1 -1
- etlplus/enums.py +288 -0
- etlplus/{ops/extract.py → extract.py} +99 -81
- etlplus/file.py +652 -0
- etlplus/{ops/load.py → load.py} +101 -78
- etlplus/{ops/run.py → run.py} +127 -159
- etlplus/{api/utils.py → run_helpers.py} +153 -209
- etlplus/{ops/transform.py → transform.py} +68 -75
- etlplus/types.py +4 -5
- etlplus/utils.py +2 -136
- etlplus/{ops/validate.py → validate.py} +12 -22
- etlplus/validation/__init__.py +44 -0
- etlplus/{ops → validation}/utils.py +17 -53
- {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/METADATA +17 -210
- etlplus-0.10.2.dist-info/RECORD +65 -0
- {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/WHEEL +1 -1
- etlplus/README.md +0 -37
- etlplus/api/enums.py +0 -51
- etlplus/cli/README.md +0 -40
- etlplus/database/README.md +0 -48
- etlplus/file/README.md +0 -105
- etlplus/file/__init__.py +0 -25
- etlplus/file/_imports.py +0 -141
- etlplus/file/_io.py +0 -160
- etlplus/file/accdb.py +0 -78
- etlplus/file/arrow.py +0 -78
- etlplus/file/avro.py +0 -176
- etlplus/file/bson.py +0 -77
- etlplus/file/cbor.py +0 -78
- etlplus/file/cfg.py +0 -79
- etlplus/file/conf.py +0 -80
- etlplus/file/core.py +0 -322
- etlplus/file/csv.py +0 -79
- etlplus/file/dat.py +0 -78
- etlplus/file/dta.py +0 -77
- etlplus/file/duckdb.py +0 -78
- etlplus/file/enums.py +0 -343
- etlplus/file/feather.py +0 -111
- etlplus/file/fwf.py +0 -77
- etlplus/file/gz.py +0 -123
- etlplus/file/hbs.py +0 -78
- etlplus/file/hdf5.py +0 -78
- etlplus/file/ini.py +0 -79
- etlplus/file/ion.py +0 -78
- etlplus/file/jinja2.py +0 -78
- etlplus/file/json.py +0 -98
- etlplus/file/log.py +0 -78
- etlplus/file/mat.py +0 -78
- etlplus/file/mdb.py +0 -78
- etlplus/file/msgpack.py +0 -78
- etlplus/file/mustache.py +0 -78
- etlplus/file/nc.py +0 -78
- etlplus/file/ndjson.py +0 -108
- etlplus/file/numbers.py +0 -75
- etlplus/file/ods.py +0 -79
- etlplus/file/orc.py +0 -111
- etlplus/file/parquet.py +0 -113
- etlplus/file/pb.py +0 -78
- etlplus/file/pbf.py +0 -77
- etlplus/file/properties.py +0 -78
- etlplus/file/proto.py +0 -77
- etlplus/file/psv.py +0 -79
- etlplus/file/rda.py +0 -78
- etlplus/file/rds.py +0 -78
- etlplus/file/sas7bdat.py +0 -78
- etlplus/file/sav.py +0 -77
- etlplus/file/sqlite.py +0 -78
- etlplus/file/stub.py +0 -84
- etlplus/file/sylk.py +0 -77
- etlplus/file/tab.py +0 -81
- etlplus/file/toml.py +0 -78
- etlplus/file/tsv.py +0 -80
- etlplus/file/txt.py +0 -102
- etlplus/file/vm.py +0 -78
- etlplus/file/wks.py +0 -77
- etlplus/file/xls.py +0 -88
- etlplus/file/xlsm.py +0 -79
- etlplus/file/xlsx.py +0 -99
- etlplus/file/xml.py +0 -185
- etlplus/file/xpt.py +0 -78
- etlplus/file/yaml.py +0 -95
- etlplus/file/zip.py +0 -175
- etlplus/file/zsav.py +0 -77
- etlplus/ops/README.md +0 -50
- etlplus/ops/__init__.py +0 -61
- etlplus/templates/README.md +0 -46
- etlplus/workflow/README.md +0 -52
- etlplus/workflow/dag.py +0 -105
- etlplus/workflow/types.py +0 -115
- etlplus-0.9.2.dist-info/RECORD +0 -134
- {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/entry_points.txt +0 -0
- {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/licenses/LICENSE +0 -0
- {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/top_level.txt +0 -0
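Taken together, the renames above describe a package reorganization: the etlplus.workflow package becomes etlplus.config, most of the etlplus.ops modules move to the top level, and the per-format etlplus/file/ submodules appear to be consolidated into a single etlplus/file.py. A minimal import-migration sketch, assuming module paths map one-to-one onto the renamed files and that 0.10.2 ships no compatibility aliases (neither assumption is confirmed by this summary):

    # Hedged sketch only: verify these paths against the installed 0.10.2 package.
    try:
        # 0.10.2 layout inferred from the renames listed above
        from etlplus.config import connector, jobs, pipeline, profile
        from etlplus import extract, load, run, transform, validate
    except ImportError:
        # 0.9.2 layout
        from etlplus.workflow import connector, jobs, pipeline, profile
        from etlplus.ops import extract, load, run, transform, validate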
etlplus/file/ods.py
DELETED
@@ -1,79 +0,0 @@
-"""
-:mod:`etlplus.file.ods` module.
-
-Helpers for reading/writing OpenDocument (ODS) spreadsheet files.
-
-Notes
------
-- An ODS file is a spreadsheet file created using the OpenDocument format.
-- Common cases:
-    - Spreadsheet files created by LibreOffice Calc, Apache OpenOffice Calc, or
-      other applications that support the OpenDocument format.
-    - Spreadsheet files exchanged in open standards environments.
-    - Spreadsheet files used in government or educational institutions
-      promoting open formats.
-- Rule of thumb:
-    - If the file follows the OpenDocument specification, use this module for
-      reading and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-
-from ..types import JSONData
-from ..types import JSONList
-from . import stub
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read ODS content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the ODS file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the ODS file.
-    """
-    return stub.read(path, format_name='ODS')
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to ODS file at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the ODS file on disk.
-    data : JSONData
-        Data to write as ODS file. Should be a list of dictionaries or a
-        single dictionary.
-
-    Returns
-    -------
-    int
-        The number of rows written to the ODS file.
-    """
-    return stub.write(path, data, format_name='ODS')
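Like pb.py, pbf.py, properties.py, proto.py, and psv.py further down, the deleted ods.py exposed only a read/write pair that delegated to the shared stub module. A minimal usage sketch of that 0.9.2 surface, with a hypothetical file path (what stub actually did with it is not visible in this diff):

    from pathlib import Path

    from etlplus.file import ods  # 0.9.2 layout; removed in 0.10.2

    # read() returned a JSONList (list of dicts); write() returned the row count.
    rows = ods.read(Path('data/report.ods'))         # hypothetical path
    count = ods.write(Path('out/report.ods'), rows)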
etlplus/file/orc.py
DELETED
@@ -1,111 +0,0 @@
-"""
-:mod:`etlplus.file.orc` module.
-
-Helpers for reading/writing Optimized Row Columnar (ORC) files.
-
-Notes
------
-- An ORC file is a columnar storage file format optimized for Big Data
-  processing.
-- Common cases:
-    - Efficient storage and retrieval of large datasets.
-    - Integration with big data frameworks like Apache Hive and Apache Spark.
-    - Compression and performance optimization for analytical queries.
-- Rule of thumb:
-    - If the file follows the ORC specification, use this module for reading
-      and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-from typing import cast
-
-from ..types import JSONData
-from ..types import JSONList
-from ._imports import get_pandas
-from ._io import normalize_records
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read ORC content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the ORC file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the ORC file.
-
-    Raises
-    ------
-    ImportError
-        When optional dependency "pyarrow" is missing.
-    """
-    pandas = get_pandas('ORC')
-    try:
-        frame = pandas.read_orc(path)
-    except ImportError as e:  # pragma: no cover
-        raise ImportError(
-            'ORC support requires optional dependency "pyarrow".\n'
-            'Install with: pip install pyarrow',
-        ) from e
-    return cast(JSONList, frame.to_dict(orient='records'))
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to ORC at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the ORC file on disk.
-    data : JSONData
-        Data to write.
-
-    Returns
-    -------
-    int
-        Number of records written.
-
-    Raises
-    ------
-    ImportError
-        When optional dependency "pyarrow" is missing.
-    """
-    records = normalize_records(data, 'ORC')
-    if not records:
-        return 0
-
-    pandas = get_pandas('ORC')
-    path.parent.mkdir(parents=True, exist_ok=True)
-    frame = pandas.DataFrame.from_records(records)
-    try:
-        frame.to_orc(path, index=False)
-    except ImportError as e:  # pragma: no cover
-        raise ImportError(
-            'ORC support requires optional dependency "pyarrow".\n'
-            'Install with: pip install pyarrow',
-        ) from e
-    return len(records)
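Unlike the stub-backed formats, the deleted orc.py wrapped pandas directly. A rough equivalent of what it did, using pandas without the etlplus wrappers and assuming pandas plus its optional pyarrow dependency are installed (the helper names here are illustrative, not part of etlplus):

    from pathlib import Path

    import pandas as pd


    def read_orc_records(path: Path) -> list[dict]:
        # pandas.read_orc requires pyarrow, as the deleted module's error message noted
        return pd.read_orc(path).to_dict(orient='records')


    def write_orc_records(path: Path, records: list[dict]) -> int:
        # Mirrors the deleted write(): no-op on empty input, create parent dirs, drop the index
        if not records:
            return 0
        path.parent.mkdir(parents=True, exist_ok=True)
        pd.DataFrame.from_records(records).to_orc(path, index=False)
        return len(records)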
etlplus/file/parquet.py
DELETED
@@ -1,113 +0,0 @@
-"""
-:mod:`etlplus.file.parquet` module.
-
-Helpers for reading/writing Apache Parquet (PARQUET) files.
-
-Notes
------
-- An Apache Parquet file is a columnar storage file format optimized for Big
-  Data processing.
-- Common cases:
-    - Efficient storage and retrieval of large datasets.
-    - Integration with big data frameworks like Apache Hive and Apache Spark.
-    - Compression and performance optimization for analytical queries.
-- Rule of thumb:
-    - If the file follows the Apache Parquet specification, use this module for
-      reading and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-from typing import cast
-
-from ..types import JSONData
-from ..types import JSONList
-from ._imports import get_pandas
-from ._io import normalize_records
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read Parquet content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PARQUET file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the Parquet file.
-
-    Raises
-    ------
-    ImportError
-        If optional dependencies for Parquet support are missing.
-    """
-    pandas = get_pandas('Parquet')
-    try:
-        frame = pandas.read_parquet(path)
-    except ImportError as e:  # pragma: no cover
-        raise ImportError(
-            'Parquet support requires optional dependency '
-            '"pyarrow" or "fastparquet".\n'
-            'Install with: pip install pyarrow',
-        ) from e
-    return cast(JSONList, frame.to_dict(orient='records'))
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to Parquet at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PARQUET file on disk.
-    data : JSONData
-        Data to write.
-
-    Returns
-    -------
-    int
-        Number of records written.
-
-    Raises
-    ------
-    ImportError
-        If optional dependencies for Parquet support are missing.
-    """
-    records = normalize_records(data, 'Parquet')
-    if not records:
-        return 0
-
-    pandas = get_pandas('Parquet')
-    path.parent.mkdir(parents=True, exist_ok=True)
-    frame = pandas.DataFrame.from_records(records)
-    try:
-        frame.to_parquet(path, index=False)
-    except ImportError as e:  # pragma: no cover
-        raise ImportError(
-            'Parquet support requires optional dependency '
-            '"pyarrow" or "fastparquet".\n'
-            'Install with: pip install pyarrow',
-        ) from e
-    return len(records)
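parquet.py followed the same pandas-based pattern as orc.py, working with either the pyarrow or fastparquet engine. A round-trip sketch against the removed 0.9.2 module (the file path is hypothetical, and the consolidated 0.10.2 replacement in etlplus/file.py is not shown in this diff):

    from pathlib import Path

    from etlplus.file import parquet  # 0.9.2 layout; needs pyarrow or fastparquet

    written = parquet.write(Path('out/events.parquet'), [{'id': 1}, {'id': 2}])
    assert written == 2                                # write() returns the record count
    rows = parquet.read(Path('out/events.parquet'))    # back to a list of dicts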
etlplus/file/pb.py
DELETED
@@ -1,78 +0,0 @@
-"""
-:mod:`etlplus.file.pb` module.
-
-Helpers for reading/writing Protocol Buffer (PB) files.
-
-Notes
------
-- PB (a.k.a. Protobuff) is a binary serialization format developed by Google
-  for structured data.
-- Common cases:
-    - Data interchange between services.
-    - Efficient storage of structured data.
-    - Communication in distributed systems.
-- Rule of thumb:
-    - If the file follows the Protocol Buffer specification, use this module
-      for reading and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-
-from ..types import JSONData
-from ..types import JSONList
-from . import stub
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read PB content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PB file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the PB file.
-    """
-    return stub.read(path, format_name='PB')
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to PB at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PB file on disk.
-    data : JSONData
-        Data to write as PB. Should be a list of dictionaries or a
-        single dictionary.
-
-    Returns
-    -------
-    int
-        The number of rows written to the PB file.
-    """
-    return stub.write(path, data, format_name='PB')
etlplus/file/pbf.py
DELETED
@@ -1,77 +0,0 @@
-"""
-:mod:`etlplus.file.pbf` module.

-Helpers for reading/writing Protocolbuffer Binary Format (PBF) files.
-
-Notes
------
-- PBF is a binary format used primarily for OpenStreetMap (OSM) data.
-- Common cases:
-    - Efficient storage of large OSM datasets.
-    - Fast data interchange for mapping applications.
-    - Compression of OSM data for reduced file size.
-- Rule of thumb:
-    - If the file follows the PBF specification, use this module for reading
-      and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-
-from ..types import JSONData
-from ..types import JSONList
-from . import stub
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read PBF content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PBF file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the PBF file.
-    """
-    return stub.read(path, format_name='PBF')
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to PBF at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PBF file on disk.
-    data : JSONData
-        Data to write as PBF. Should be a list of dictionaries or a
-        single dictionary.
-
-    Returns
-    -------
-    int
-        The number of rows written to the PBF file.
-    """
-    return stub.write(path, data, format_name='PBF')
etlplus/file/properties.py
DELETED
@@ -1,78 +0,0 @@
-"""
-:mod:`etlplus.file.properties` module.
-
-Helpers for reading/writing properties (PROPERTIES) files.
-
-Notes
------
-- A PROPERTIES file is a properties file that typically uses key-value pairs,
-  often with a simple syntax.
-- Common cases:
-    - Java-style properties files with ``key=value`` pairs.
-    - INI-style files without sections.
-    - Custom formats specific to certain applications.
-- Rule of thumb:
-    - If the file follows a standard format like INI, consider using
-      dedicated parsers.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-
-from ..types import JSONData
-from ..types import JSONList
-from . import stub
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read PROPERTIES content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PROPERTIES file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the PROPERTIES file.
-    """
-    return stub.read(path, format_name='PROPERTIES')
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to PROPERTIES at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PROPERTIES file on disk.
-    data : JSONData
-        Data to write as PROPERTIES. Should be a list of dictionaries or a
-        single dictionary.
-
-    Returns
-    -------
-    int
-        The number of rows written to the PROPERTIES file.
-    """
-    return stub.write(path, data, format_name='PROPERTIES')
etlplus/file/proto.py
DELETED
@@ -1,77 +0,0 @@
-"""
-:mod:`etlplus.file.proto` module.
-
-Helpers for reading/writing Protocol Buffers schema (PROTO) files.
-
-Notes
------
-- A PROTO file defines the structure of Protocol Buffers messages.
-- Common cases:
-    - Defining message formats for data interchange.
-    - Generating code for serialization/deserialization.
-    - Documenting data structures in distributed systems.
-- Rule of thumb:
-    - If the file follows the Protocol Buffers schema specification, use this
-      module for reading and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-
-from ..types import JSONData
-from ..types import JSONList
-from . import stub
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read PROTO content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PROTO file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the PROTO file.
-    """
-    return stub.read(path, format_name='PROTO')
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to PROTO at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PROTO file on disk.
-    data : JSONData
-        Data to write as PROTO. Should be a list of dictionaries or a
-        single dictionary.
-
-    Returns
-    -------
-    int
-        The number of rows written to the PROTO file.
-    """
-    return stub.write(path, data, format_name='PROTO')
etlplus/file/psv.py
DELETED
@@ -1,79 +0,0 @@
-"""
-:mod:`etlplus.file.psv` module.
-
-Helpers for reading/writing Pipe-Separated Values (PSV) files.
-
-Notes
------
-- A PSV file is a plain text file that uses the pipe character (`|`) to
-  separate values.
-- Common cases:
-    - Each line in the file represents a single record.
-    - The first line often contains headers that define the column names.
-    - Values may be enclosed in quotes, especially if they contain pipes
-      or special characters.
-- Rule of thumb:
-    - If the file follows the PSV specification, use this module for
-      reading and writing.
-"""
-
-from __future__ import annotations
-
-from pathlib import Path
-
-from ..types import JSONData
-from ..types import JSONList
-from . import stub
-
-# SECTION: EXPORTS ========================================================== #
-
-
-__all__ = [
-    'read',
-    'write',
-]
-
-
-# SECTION: FUNCTIONS ======================================================== #
-
-
-def read(
-    path: Path,
-) -> JSONList:
-    """
-    Read PSV content from ``path``.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PSV file on disk.
-
-    Returns
-    -------
-    JSONList
-        The list of dictionaries read from the PSV file.
-    """
-    return stub.read(path, format_name='PSV')
-
-
-def write(
-    path: Path,
-    data: JSONData,
-) -> int:
-    """
-    Write ``data`` to PSV file at ``path`` and return record count.
-
-    Parameters
-    ----------
-    path : Path
-        Path to the PSV file on disk.
-    data : JSONData
-        Data to write as PSV file. Should be a list of dictionaries or a
-        single dictionary.
-
-    Returns
-    -------
-    int
-        The number of rows written to the PSV file.
-    """
-    return stub.write(path, data, format_name='PSV')