etlplus 0.17.2__py3-none-any.whl → 0.17.3__py3-none-any.whl

This diff compares the contents of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registries.
Files changed (68)
  1. etlplus/file/_imports.py +35 -20
  2. etlplus/file/_io.py +138 -15
  3. etlplus/file/_r.py +48 -0
  4. etlplus/file/_sql.py +224 -0
  5. etlplus/file/accdb.py +7 -6
  6. etlplus/file/arrow.py +13 -24
  7. etlplus/file/avro.py +13 -10
  8. etlplus/file/bson.py +61 -22
  9. etlplus/file/cbor.py +13 -25
  10. etlplus/file/cfg.py +7 -6
  11. etlplus/file/conf.py +7 -6
  12. etlplus/file/core.py +1 -1
  13. etlplus/file/csv.py +8 -7
  14. etlplus/file/dat.py +9 -6
  15. etlplus/file/dta.py +15 -30
  16. etlplus/file/duckdb.py +29 -122
  17. etlplus/file/feather.py +15 -30
  18. etlplus/file/fwf.py +16 -14
  19. etlplus/file/gz.py +12 -7
  20. etlplus/file/hbs.py +7 -6
  21. etlplus/file/hdf5.py +31 -6
  22. etlplus/file/ini.py +17 -24
  23. etlplus/file/ion.py +7 -6
  24. etlplus/file/jinja2.py +7 -6
  25. etlplus/file/json.py +10 -11
  26. etlplus/file/log.py +7 -6
  27. etlplus/file/mat.py +7 -6
  28. etlplus/file/mdb.py +7 -6
  29. etlplus/file/msgpack.py +13 -25
  30. etlplus/file/mustache.py +7 -6
  31. etlplus/file/nc.py +30 -21
  32. etlplus/file/ndjson.py +10 -6
  33. etlplus/file/numbers.py +7 -6
  34. etlplus/file/ods.py +10 -6
  35. etlplus/file/orc.py +15 -30
  36. etlplus/file/parquet.py +10 -6
  37. etlplus/file/pb.py +22 -23
  38. etlplus/file/pbf.py +7 -6
  39. etlplus/file/properties.py +15 -29
  40. etlplus/file/proto.py +14 -20
  41. etlplus/file/psv.py +8 -7
  42. etlplus/file/rda.py +19 -51
  43. etlplus/file/rds.py +19 -51
  44. etlplus/file/sas7bdat.py +10 -30
  45. etlplus/file/sav.py +13 -24
  46. etlplus/file/sqlite.py +25 -83
  47. etlplus/file/stub.py +8 -6
  48. etlplus/file/sylk.py +7 -6
  49. etlplus/file/tab.py +8 -7
  50. etlplus/file/toml.py +14 -17
  51. etlplus/file/tsv.py +8 -7
  52. etlplus/file/txt.py +10 -7
  53. etlplus/file/vm.py +7 -6
  54. etlplus/file/wks.py +7 -6
  55. etlplus/file/xls.py +8 -5
  56. etlplus/file/xlsm.py +10 -6
  57. etlplus/file/xlsx.py +10 -6
  58. etlplus/file/xml.py +11 -9
  59. etlplus/file/xpt.py +13 -33
  60. etlplus/file/yaml.py +10 -11
  61. etlplus/file/zip.py +10 -5
  62. etlplus/file/zsav.py +7 -6
  63. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/METADATA +1 -1
  64. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/RECORD +68 -66
  65. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/WHEEL +0 -0
  66. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/entry_points.txt +0 -0
  67. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/licenses/LICENSE +0 -0
  68. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/top_level.txt +0 -0
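A pattern repeats across every format module below: the private `_get_<module>()` loaders are replaced by a shared `get_dependency()` helper, `path: Path` parameters widen to `StrPath` with an explicit `coerce_path()` call, and inline `path.parent.mkdir(parents=True, exist_ok=True)` calls become `ensure_parent_dir()`. The diffs for `etlplus/file/_imports.py` and `etlplus/file/_io.py` are not reproduced on this page, so the following is only a sketch of what those shared helpers plausibly look like, inferred from the call sites; everything beyond the three call signatures is an assumption.

from __future__ import annotations

import importlib
from pathlib import Path
from typing import Any

StrPath = str | Path  # stand-in for etlplus.types.StrPath; exact alias assumed


def get_dependency(
    name: str,
    *,
    format_name: str,
    pip_name: str | None = None,
) -> Any:
    """Import optional module *name*, raising an install hint if missing."""
    try:
        return importlib.import_module(name)
    except ImportError as exc:
        pip = pip_name or name  # e.g. module 'bson' ships in 'pymongo'
        raise ImportError(
            f'{format_name} support requires optional dependency "{pip}".\n'
            f'Install with: pip install {pip}'
        ) from exc


def coerce_path(path: StrPath) -> Path:
    """Normalize str-or-Path input to a Path."""
    return Path(path)


def ensure_parent_dir(path: Path) -> None:
    """Create the parent directory tree of *path* if it does not exist."""
    path.parent.mkdir(parents=True, exist_ok=True)

Centralizing the loader also deduplicates the install-hint text that each removed `_get_*()` helper carried; compare the deleted `_get_pyarrow()`, `_get_bson()`, and `_get_cbor()` bodies below.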
etlplus/file/arrow.py CHANGED
@@ -18,13 +18,14 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-from typing import Any
 from typing import cast
 
 from ..types import JSONData
 from ..types import JSONList
-from ._imports import get_optional_module
+from ..types import StrPath
+from ._imports import get_dependency
+from ._io import coerce_path
+from ._io import ensure_parent_dir
 from ._io import normalize_records
 
 # SECTION: EXPORTS ========================================================== #
@@ -37,32 +38,18 @@ __all__ = [
 ]
 
 
-# SECTION: INTERNAL FUNCTIONS =============================================== #
-
-
-def _get_pyarrow() -> Any:
-    """Return the pyarrow module, importing it on first use."""
-    return get_optional_module(
-        'pyarrow',
-        error_message=(
-            'ARROW support requires optional dependency "pyarrow".\n'
-            'Install with: pip install pyarrow'
-        ),
-    )
-
-
 # SECTION: FUNCTIONS ======================================================== #
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read ARROW content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the Apache Arrow file on disk.
 
     Returns
@@ -70,7 +57,8 @@ def read(
     JSONList
         The list of dictionaries read from the Apache Arrow file.
     """
-    pyarrow = _get_pyarrow()
+    path = coerce_path(path)
+    pyarrow = get_dependency('pyarrow', format_name='ARROW')
     with pyarrow.memory_map(str(path), 'r') as source:
         reader = pyarrow.ipc.open_file(source)
         table = reader.read_all()
@@ -78,7 +66,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -86,7 +74,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the ARROW file on disk.
     data : JSONData
         Data to write as ARROW. Should be a list of dictionaries or a
@@ -97,13 +85,14 @@ def write(
     int
         The number of rows written to the ARROW file.
     """
+    path = coerce_path(path)
     records = normalize_records(data, 'ARROW')
     if not records:
         return 0
 
-    pyarrow = _get_pyarrow()
+    pyarrow = get_dependency('pyarrow', format_name='ARROW')
     table = pyarrow.Table.from_pylist(records)
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     with pyarrow.OSFile(str(path), 'wb') as sink:
         with pyarrow.ipc.new_file(sink, table.schema) as writer:
             writer.write_table(table)
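One practical effect of the `StrPath` migration, as a minimal round-trip sketch (assumes `pyarrow` is installed; the path is illustrative):

from etlplus.file import arrow

rows = [{'id': 1, 'name': 'a'}, {'id': 2, 'name': 'b'}]

# Plain strings are now accepted; write() coerces them to Path and
# creates missing parent directories before writing.
count = arrow.write('out/data.arrow', rows)
assert count == 2
assert arrow.read('out/data.arrow') == rows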
etlplus/file/avro.py CHANGED
@@ -18,15 +18,16 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
 from typing import Any
 from typing import cast
 
-from etlplus.file._imports import get_fastavro
-
 from ..types import JSONData
 from ..types import JSONDict
 from ..types import JSONList
+from ..types import StrPath
+from ._imports import get_dependency
+from ._io import coerce_path
+from ._io import ensure_parent_dir
 from ._io import normalize_records
 
 # SECTION: EXPORTS ========================================================== #
@@ -122,14 +123,14 @@ def _merge_types(types: list[str]) -> str | list[str]:
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read AVRO content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the AVRO file on disk.
 
     Returns
@@ -137,14 +138,15 @@ def read(
     JSONList
         The list of dictionaries read from the AVRO file.
     """
-    fastavro = get_fastavro()
+    path = coerce_path(path)
+    fastavro = get_dependency('fastavro', format_name='AVRO')
     with path.open('rb') as handle:
         reader = fastavro.reader(handle)
         return [cast(JSONDict, record) for record in reader]
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -152,7 +154,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the AVRO file on disk.
     data : JSONData
         Data to write.
@@ -162,15 +164,16 @@ def write(
     int
         Number of records written.
     """
+    path = coerce_path(path)
     records = normalize_records(data, 'AVRO')
    if not records:
         return 0
 
-    fastavro = get_fastavro()
+    fastavro = get_dependency('fastavro', format_name='AVRO')
     schema = _infer_schema(records)
     parsed_schema = fastavro.parse_schema(schema)
 
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     with path.open('wb') as handle:
         fastavro.writer(handle, parsed_schema, records)
 
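The hunk headers above reference `_infer_schema()` and `_merge_types()`, the unchanged internals that derive an Avro schema from the records before writing. Roughly, inference of this kind maps Python value types to Avro primitives and widens to a union when a field varies; the sketch below illustrates the idea only and is not the module's actual implementation:

def infer_fields(records: list[dict[str, object]]) -> list[dict[str, object]]:
    # Map Python scalar types to Avro primitive type names.
    type_map = {bool: 'boolean', int: 'long', float: 'double', str: 'string'}
    seen: dict[str, set[str]] = {}
    for record in records:
        for key, value in record.items():
            avro_type = (
                'null' if value is None
                else type_map.get(type(value), 'string')
            )
            seen.setdefault(key, set()).add(avro_type)
    for record in records:  # fields absent from some records become nullable
        for key in seen:
            if key not in record:
                seen[key].add('null')
    # A single type stays scalar; multiple types become an Avro union (list).
    return [
        {'name': key, 'type': types.pop() if len(types) == 1 else sorted(types)}
        for key, types in seen.items()
    ]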
etlplus/file/bson.py CHANGED
@@ -17,13 +17,15 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
 from typing import Any
 from typing import cast
 
 from ..types import JSONData
 from ..types import JSONList
-from ._imports import get_optional_module
+from ..types import StrPath
+from ._imports import get_dependency
+from ._io import coerce_path
+from ._io import ensure_parent_dir
 from ._io import normalize_records
 
 # SECTION: EXPORTS ========================================================== #
@@ -39,7 +41,30 @@ __all__ = [
 # SECTION: INTERNAL FUNCTIONS =============================================== #
 
 
-def _decode_all(bson_module: Any, payload: bytes) -> list[dict[str, Any]]:
+def _decode_all(
+    bson_module: Any,
+    payload: bytes,
+) -> list[dict[str, Any]]:
+    """
+    Decode all BSON documents from raw payload bytes.
+
+    Parameters
+    ----------
+    bson_module : Any
+        The imported BSON module to use for decoding.
+    payload : bytes
+        Raw bytes read from the BSON file.
+
+    Returns
+    -------
+    list[dict[str, Any]]
+        List of decoded BSON documents as dictionaries.
+
+    Raises
+    ------
+    AttributeError
+        If the bson module lacks the required :meth:`decode_all()` method.
+    """
     if hasattr(bson_module, 'decode_all'):
         return bson_module.decode_all(payload)
     if hasattr(bson_module, 'BSON'):
@@ -47,7 +72,30 @@ def _decode_all(bson_module: Any, payload: bytes) -> list[dict[str, Any]]:
     raise AttributeError('bson module lacks decode_all()')
 
 
-def _encode_doc(bson_module: Any, doc: dict[str, Any]) -> bytes:
+def _encode_doc(
+    bson_module: Any,
+    doc: dict[str, Any],
+) -> bytes:
+    """
+    Encode a single BSON document to bytes.
+
+    Parameters
+    ----------
+    bson_module : Any
+        The imported BSON module to use for encoding.
+    doc : dict[str, Any]
+        The BSON document to encode.
+
+    Returns
+    -------
+    bytes
+        The encoded BSON document as bytes.
+
+    Raises
+    ------
+    AttributeError
+        If the bson module lacks the required :meth:`encode()` method.
+    """
     if hasattr(bson_module, 'encode'):
         return bson_module.encode(doc)
     if hasattr(bson_module, 'BSON'):
@@ -55,29 +103,18 @@ def _encode_doc(bson_module: Any, doc: dict[str, Any]) -> bytes:
     raise AttributeError('bson module lacks encode()')
 
 
-def _get_bson() -> Any:
-    """Return the bson module, importing it on first use."""
-    return get_optional_module(
-        'bson',
-        error_message=(
-            'BSON support requires optional dependency "pymongo".\n'
-            'Install with: pip install pymongo'
-        ),
-    )
-
-
 # SECTION: FUNCTIONS ======================================================== #
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read BSON content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the BSON file on disk.
 
     Returns
@@ -85,7 +122,8 @@ def read(
     JSONList
         The list of dictionaries read from the BSON file.
     """
-    bson = _get_bson()
+    path = coerce_path(path)
+    bson = get_dependency('bson', format_name='BSON', pip_name='pymongo')
     with path.open('rb') as handle:
         payload = handle.read()
     docs = _decode_all(bson, payload)
@@ -93,7 +131,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -101,7 +139,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the BSON file on disk.
     data : JSONData
         Data to write as BSON. Should be a list of dictionaries or a
@@ -112,12 +150,13 @@ def write(
     int
         The number of rows written to the BSON file.
    """
-    bson = _get_bson()
+    path = coerce_path(path)
+    bson = get_dependency('bson', format_name='BSON', pip_name='pymongo')
     records = normalize_records(data, 'BSON')
     if not records:
         return 0
 
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     with path.open('wb') as handle:
         for record in records:
             handle.write(_encode_doc(bson, record))
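`bson` is the one module here where the import name and the PyPI distribution differ, hence the new `pip_name` argument: `import bson` is satisfied by installing `pymongo`. A sketch of the failure mode when the dependency is missing, with the message wording taken from the removed `_get_bson()` helper:

try:
    from etlplus.file import bson as bson_file
    bson_file.read('dump.bson')  # illustrative path
except ImportError as exc:
    # Expected hint when pymongo is not installed:
    #   BSON support requires optional dependency "pymongo".
    #   Install with: pip install pymongo
    print(exc)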
etlplus/file/cbor.py CHANGED
@@ -18,12 +18,12 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-from typing import Any
-
 from ..types import JSONData
-from ._imports import get_optional_module
+from ..types import StrPath
+from ._imports import get_dependency
+from ._io import coerce_path
 from ._io import coerce_record_payload
+from ._io import ensure_parent_dir
 from ._io import normalize_records
 
 # SECTION: EXPORTS ========================================================== #
@@ -36,32 +36,18 @@ __all__ = [
 ]
 
 
-# SECTION: INTERNAL FUNCTIONS =============================================== #
-
-
-def _get_cbor() -> Any:
-    """Return the cbor2 module, importing it on first use."""
-    return get_optional_module(
-        'cbor2',
-        error_message=(
-            'CBOR support requires optional dependency "cbor2".\n'
-            'Install with: pip install cbor2'
-        ),
-    )
-
-
 # SECTION: FUNCTIONS ======================================================== #
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONData:
     """
     Read CBOR content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CBOR file on disk.
 
     Returns
@@ -69,14 +55,15 @@ def read(
     JSONData
         The structured data read from the CBOR file.
     """
-    cbor2 = _get_cbor()
+    path = coerce_path(path)
+    cbor2 = get_dependency('cbor2', format_name='CBOR')
     with path.open('rb') as handle:
         payload = cbor2.loads(handle.read())
     return coerce_record_payload(payload, format_name='CBOR')
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -84,7 +71,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CBOR file on disk.
     data : JSONData
         Data to write as CBOR file. Should be a list of dictionaries or a
@@ -95,10 +82,11 @@ def write(
     int
         The number of rows written to the CBOR file.
     """
-    cbor2 = _get_cbor()
+    path = coerce_path(path)
+    cbor2 = get_dependency('cbor2', format_name='CBOR')
     records = normalize_records(data, 'CBOR')
     payload: JSONData = records if isinstance(data, list) else records[0]
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     with path.open('wb') as handle:
         handle.write(cbor2.dumps(payload))
     return len(records)
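A detail worth noticing in `write()` above: the payload keeps the caller's shape (`records if isinstance(data, list) else records[0]`), so a single mapping is encoded as one CBOR map rather than a one-element array, while the return value always counts records. A small sketch of the observable behaviour, assuming `cbor2` is installed and that `normalize_records` wraps a bare dict into a one-element list, as the `records[0]` indexing suggests:

from etlplus.file import cbor

# A bare dict is written as a single CBOR map; the count is still 1.
assert cbor.write('one.cbor', {'id': 1}) == 1
# A list of dicts is written as a CBOR array of maps.
assert cbor.write('many.cbor', [{'id': 1}, {'id': 2}]) == 2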
etlplus/file/cfg.py CHANGED
@@ -19,11 +19,11 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
 from . import stub
+from ._io import coerce_path
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -39,14 +39,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read CFG content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CFG file on disk.
 
     Returns
@@ -58,7 +58,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -66,7 +66,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CFG file on disk.
     data : JSONData
         Data to write as CFG file. Should be a list of dictionaries or a
@@ -77,4 +77,5 @@ def write(
     int
         The number of rows written to the CFG file.
     """
+    path = coerce_path(path)
     return stub.write(path, data, format_name='CFG')
etlplus/file/conf.py CHANGED
@@ -20,11 +20,11 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
 from . import stub
+from ._io import coerce_path
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -40,14 +40,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read CONF content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CONF file on disk.
 
     Returns
@@ -59,7 +59,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -67,7 +67,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CONF file on disk.
     data : JSONData
         Data to write as CONF. Should be a list of dictionaries or a
@@ -78,4 +78,5 @@ def write(
     int
         The number of rows written to the CONF file.
     """
+    path = coerce_path(path)
     return stub.write(path, data, format_name='CONF')
etlplus/file/core.py CHANGED
@@ -265,7 +265,7 @@ class File:
 
     def read(self) -> JSONData:
         """
-        Read structured data from :attr:path` using :attr:`file_format`.
+        Read structured data from :attr:`path` using :attr:`file_format`.
 
         Returns
         -------
etlplus/file/csv.py CHANGED
@@ -18,10 +18,10 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
+from ._io import coerce_path
 from ._io import read_delimited
 from ._io import write_delimited
 
@@ -39,14 +39,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read CSV content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CSV file on disk.
 
     Returns
@@ -58,7 +58,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -66,7 +66,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the CSV file on disk.
     data : JSONData
         Data to write as CSV. Should be a list of dictionaries or a
@@ -77,4 +77,5 @@ def write(
     int
         The number of rows written to the CSV file.
     """
-    return write_delimited(path, data, delimiter=',')
+    path = coerce_path(path)
+    return write_delimited(path, data, delimiter=',', format_name='CSV')
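`write_delimited()` now also takes a `format_name`, presumably so validation errors can name the concrete format (CSV here, DAT below) rather than a generic label. Its body lives in `_io.py`, whose diff is not shown; a minimal sketch of such a helper, with only the signature taken from these call sites and the rest assumed:

import csv
from pathlib import Path


def write_delimited(path, data, *, delimiter, format_name):
    # Hypothetical stand-in for etlplus.file._io.write_delimited.
    records = data if isinstance(data, list) else [data]
    if not records:
        return 0
    if not all(isinstance(record, dict) for record in records):
        raise TypeError(f'{format_name} rows must be dictionaries')
    path = Path(path)
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open('w', encoding='utf-8', newline='') as handle:
        writer = csv.DictWriter(
            handle,
            fieldnames=list(records[0]),
            delimiter=delimiter,
        )
        writer.writeheader()
        writer.writerows(records)
    return len(records)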
etlplus/file/dat.py CHANGED
@@ -19,12 +19,13 @@ Notes
 from __future__ import annotations
 
 import csv
-from pathlib import Path
 from typing import cast
 
 from ..types import JSONData
 from ..types import JSONDict
 from ..types import JSONList
+from ..types import StrPath
+from ._io import coerce_path
 from ._io import write_delimited
 
 # SECTION: EXPORTS ========================================================== #
@@ -41,14 +42,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read DAT content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the DAT file on disk.
 
     Returns
@@ -56,6 +57,7 @@ def read(
     JSONList
         The list of dictionaries read from the DAT file.
     """
+    path = coerce_path(path)
     with path.open('r', encoding='utf-8', newline='') as handle:
         sample = handle.read(4096)
         handle.seek(0)
@@ -95,7 +97,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -103,7 +105,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the DAT file on disk.
     data : JSONData
         Data to write as DAT file. Should be a list of dictionaries or a
@@ -114,4 +116,5 @@ def write(
     int
         The number of rows written to the DAT file.
     """
-    return write_delimited(path, data, delimiter=',')
+    path = coerce_path(path)
+    return write_delimited(path, data, delimiter=',', format_name='DAT')
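`read()` in this module samples the first 4096 bytes and rewinds before parsing, the classic `csv.Sniffer` dialect-detection pattern; the detection code itself sits outside the hunks shown. A self-contained sketch of that pattern (the candidate delimiter set is an assumption):

import csv
from pathlib import Path


def read_sniffed(path: Path) -> list[dict[str, str]]:
    with path.open('r', encoding='utf-8', newline='') as handle:
        sample = handle.read(4096)  # sample...
        handle.seek(0)              # ...then rewind, as dat.read() does
        try:
            dialect = csv.Sniffer().sniff(sample, delimiters=',\t|;')
        except csv.Error:
            dialect = csv.excel     # fall back to plain CSV
        return list(csv.DictReader(handle, dialect=dialect))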