etlplus 0.16.10__py3-none-any.whl → 0.17.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. etlplus/file/README.md +33 -0
  2. etlplus/file/_imports.py +35 -20
  3. etlplus/file/_io.py +138 -15
  4. etlplus/file/_r.py +48 -0
  5. etlplus/file/_sql.py +224 -0
  6. etlplus/file/accdb.py +7 -6
  7. etlplus/file/arrow.py +29 -10
  8. etlplus/file/avro.py +13 -10
  9. etlplus/file/bson.py +94 -10
  10. etlplus/file/cbor.py +29 -17
  11. etlplus/file/cfg.py +7 -6
  12. etlplus/file/conf.py +7 -6
  13. etlplus/file/core.py +1 -1
  14. etlplus/file/csv.py +8 -7
  15. etlplus/file/dat.py +52 -11
  16. etlplus/file/dta.py +36 -16
  17. etlplus/file/duckdb.py +72 -11
  18. etlplus/file/enums.py +29 -0
  19. etlplus/file/feather.py +15 -30
  20. etlplus/file/fwf.py +44 -10
  21. etlplus/file/gz.py +12 -7
  22. etlplus/file/hbs.py +7 -6
  23. etlplus/file/hdf5.py +71 -8
  24. etlplus/file/ini.py +60 -17
  25. etlplus/file/ion.py +7 -6
  26. etlplus/file/jinja2.py +7 -6
  27. etlplus/file/json.py +10 -11
  28. etlplus/file/log.py +7 -6
  29. etlplus/file/mat.py +7 -6
  30. etlplus/file/mdb.py +7 -6
  31. etlplus/file/msgpack.py +27 -15
  32. etlplus/file/mustache.py +7 -6
  33. etlplus/file/nc.py +69 -11
  34. etlplus/file/ndjson.py +10 -6
  35. etlplus/file/numbers.py +7 -6
  36. etlplus/file/ods.py +48 -11
  37. etlplus/file/orc.py +15 -30
  38. etlplus/file/parquet.py +10 -6
  39. etlplus/file/pb.py +36 -24
  40. etlplus/file/pbf.py +7 -6
  41. etlplus/file/properties.py +44 -18
  42. etlplus/file/proto.py +24 -18
  43. etlplus/file/psv.py +12 -11
  44. etlplus/file/rda.py +57 -15
  45. etlplus/file/rds.py +50 -14
  46. etlplus/file/sas7bdat.py +26 -16
  47. etlplus/file/sav.py +34 -16
  48. etlplus/file/sqlite.py +70 -10
  49. etlplus/file/stub.py +8 -6
  50. etlplus/file/sylk.py +7 -6
  51. etlplus/file/tab.py +13 -13
  52. etlplus/file/toml.py +56 -17
  53. etlplus/file/tsv.py +8 -7
  54. etlplus/file/txt.py +10 -7
  55. etlplus/file/vm.py +7 -6
  56. etlplus/file/wks.py +7 -6
  57. etlplus/file/xls.py +8 -5
  58. etlplus/file/xlsm.py +48 -10
  59. etlplus/file/xlsx.py +10 -6
  60. etlplus/file/xml.py +11 -9
  61. etlplus/file/xpt.py +46 -10
  62. etlplus/file/yaml.py +10 -11
  63. etlplus/file/zip.py +10 -5
  64. etlplus/file/zsav.py +7 -6
  65. {etlplus-0.16.10.dist-info → etlplus-0.17.3.dist-info}/METADATA +44 -26
  66. {etlplus-0.16.10.dist-info → etlplus-0.17.3.dist-info}/RECORD +70 -68
  67. {etlplus-0.16.10.dist-info → etlplus-0.17.3.dist-info}/WHEEL +0 -0
  68. {etlplus-0.16.10.dist-info → etlplus-0.17.3.dist-info}/entry_points.txt +0 -0
  69. {etlplus-0.16.10.dist-info → etlplus-0.17.3.dist-info}/licenses/LICENSE +0 -0
  70. {etlplus-0.16.10.dist-info → etlplus-0.17.3.dist-info}/top_level.txt +0 -0
etlplus/file/accdb.py CHANGED
@@ -19,11 +19,11 @@ Notes
19
19
 
20
20
  from __future__ import annotations
21
21
 
22
- from pathlib import Path
23
-
24
22
  from ..types import JSONData
25
23
  from ..types import JSONList
24
+ from ..types import StrPath
26
25
  from . import stub
26
+ from ._io import coerce_path
27
27
 
28
28
  # SECTION: EXPORTS ========================================================== #
29
29
 
@@ -39,14 +39,14 @@ __all__ = [
39
39
 
40
40
 
41
41
  def read(
42
- path: Path,
42
+ path: StrPath,
43
43
  ) -> JSONList:
44
44
  """
45
45
  Read ACCDB content from *path*.
46
46
 
47
47
  Parameters
48
48
  ----------
49
- path : Path
49
+ path : StrPath
50
50
  Path to the ACCDB file on disk.
51
51
 
52
52
  Returns
@@ -58,7 +58,7 @@ def read(
58
58
 
59
59
 
60
60
  def write(
61
- path: Path,
61
+ path: StrPath,
62
62
  data: JSONData,
63
63
  ) -> int:
64
64
  """
@@ -66,7 +66,7 @@ def write(
66
66
 
67
67
  Parameters
68
68
  ----------
69
- path : Path
69
+ path : StrPath
70
70
  Path to the ACCDB file on disk.
71
71
  data : JSONData
72
72
  Data to write as ACCDB. Should be a list of dictionaries or a single
@@ -77,4 +77,5 @@ def write(
77
77
  int
78
78
  The number of rows written to the ACCDB file.
79
79
  """
80
+ path = coerce_path(path)
80
81
  return stub.write(path, data, format_name='ACCDB')
etlplus/file/arrow.py CHANGED
@@ -1,8 +1,7 @@
1
1
  """
2
2
  :mod:`etlplus.file.arrow` module.
3
3
 
4
- Stub helpers for reading/writing Apache Arrow (ARROW) files (not implemented
5
- yet).
4
+ Helpers for reading/writing Apache Arrow (ARROW) files.
6
5
 
7
6
  Notes
8
7
  -----
@@ -19,11 +18,15 @@ Notes
19
18
 
20
19
  from __future__ import annotations
21
20
 
22
- from pathlib import Path
21
+ from typing import cast
23
22
 
24
23
  from ..types import JSONData
25
24
  from ..types import JSONList
26
- from . import stub
25
+ from ..types import StrPath
26
+ from ._imports import get_dependency
27
+ from ._io import coerce_path
28
+ from ._io import ensure_parent_dir
29
+ from ._io import normalize_records
27
30
 
28
31
  # SECTION: EXPORTS ========================================================== #
29
32
 
@@ -39,14 +42,14 @@ __all__ = [
39
42
 
40
43
 
41
44
  def read(
42
- path: Path,
45
+ path: StrPath,
43
46
  ) -> JSONList:
44
47
  """
45
48
  Read ARROW content from *path*.
46
49
 
47
50
  Parameters
48
51
  ----------
49
- path : Path
52
+ path : StrPath
50
53
  Path to the Apache Arrow file on disk.
51
54
 
52
55
  Returns
@@ -54,11 +57,16 @@ def read(
54
57
  JSONList
55
58
  The list of dictionaries read from the Apache Arrow file.
56
59
  """
57
- return stub.read(path, format_name='ARROW')
60
+ path = coerce_path(path)
61
+ pyarrow = get_dependency('pyarrow', format_name='ARROW')
62
+ with pyarrow.memory_map(str(path), 'r') as source:
63
+ reader = pyarrow.ipc.open_file(source)
64
+ table = reader.read_all()
65
+ return cast(JSONList, table.to_pylist())
58
66
 
59
67
 
60
68
  def write(
61
- path: Path,
69
+ path: StrPath,
62
70
  data: JSONData,
63
71
  ) -> int:
64
72
  """
@@ -66,7 +74,7 @@ def write(
66
74
 
67
75
  Parameters
68
76
  ----------
69
- path : Path
77
+ path : StrPath
70
78
  Path to the ARROW file on disk.
71
79
  data : JSONData
72
80
  Data to write as ARROW. Should be a list of dictionaries or a
@@ -77,4 +85,15 @@ def write(
77
85
  int
78
86
  The number of rows written to the ARROW file.
79
87
  """
80
- return stub.write(path, data, format_name='ARROW')
88
+ path = coerce_path(path)
89
+ records = normalize_records(data, 'ARROW')
90
+ if not records:
91
+ return 0
92
+
93
+ pyarrow = get_dependency('pyarrow', format_name='ARROW')
94
+ table = pyarrow.Table.from_pylist(records)
95
+ ensure_parent_dir(path)
96
+ with pyarrow.OSFile(str(path), 'wb') as sink:
97
+ with pyarrow.ipc.new_file(sink, table.schema) as writer:
98
+ writer.write_table(table)
99
+ return len(records)
etlplus/file/avro.py CHANGED
@@ -18,15 +18,16 @@ Notes
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
- from pathlib import Path
22
21
  from typing import Any
23
22
  from typing import cast
24
23
 
25
- from etlplus.file._imports import get_fastavro
26
-
27
24
  from ..types import JSONData
28
25
  from ..types import JSONDict
29
26
  from ..types import JSONList
27
+ from ..types import StrPath
28
+ from ._imports import get_dependency
29
+ from ._io import coerce_path
30
+ from ._io import ensure_parent_dir
30
31
  from ._io import normalize_records
31
32
 
32
33
  # SECTION: EXPORTS ========================================================== #
@@ -122,14 +123,14 @@ def _merge_types(types: list[str]) -> str | list[str]:
122
123
 
123
124
 
124
125
  def read(
125
- path: Path,
126
+ path: StrPath,
126
127
  ) -> JSONList:
127
128
  """
128
129
  Read AVRO content from *path*.
129
130
 
130
131
  Parameters
131
132
  ----------
132
- path : Path
133
+ path : StrPath
133
134
  Path to the AVRO file on disk.
134
135
 
135
136
  Returns
@@ -137,14 +138,15 @@ def read(
137
138
  JSONList
138
139
  The list of dictionaries read from the AVRO file.
139
140
  """
140
- fastavro = get_fastavro()
141
+ path = coerce_path(path)
142
+ fastavro = get_dependency('fastavro', format_name='AVRO')
141
143
  with path.open('rb') as handle:
142
144
  reader = fastavro.reader(handle)
143
145
  return [cast(JSONDict, record) for record in reader]
144
146
 
145
147
 
146
148
  def write(
147
- path: Path,
149
+ path: StrPath,
148
150
  data: JSONData,
149
151
  ) -> int:
150
152
  """
@@ -152,7 +154,7 @@ def write(
152
154
 
153
155
  Parameters
154
156
  ----------
155
- path : Path
157
+ path : StrPath
156
158
  Path to the AVRO file on disk.
157
159
  data : JSONData
158
160
  Data to write.
@@ -162,15 +164,16 @@ def write(
162
164
  int
163
165
  Number of records written.
164
166
  """
167
+ path = coerce_path(path)
165
168
  records = normalize_records(data, 'AVRO')
166
169
  if not records:
167
170
  return 0
168
171
 
169
- fastavro = get_fastavro()
172
+ fastavro = get_dependency('fastavro', format_name='AVRO')
170
173
  schema = _infer_schema(records)
171
174
  parsed_schema = fastavro.parse_schema(schema)
172
175
 
173
- path.parent.mkdir(parents=True, exist_ok=True)
176
+ ensure_parent_dir(path)
174
177
  with path.open('wb') as handle:
175
178
  fastavro.writer(handle, parsed_schema, records)
176
179
 
etlplus/file/bson.py CHANGED
@@ -1,8 +1,7 @@
1
1
  """
2
2
  :mod:`etlplus.file.bson` module.
3
3
 
4
- Stub helpers for reading/writing Binary JSON (BSON) files (not implemented
5
- yet).
4
+ Helpers for reading/writing Binary JSON (BSON) files.
6
5
 
7
6
  Notes
8
7
  -----
@@ -18,11 +17,16 @@ Notes
18
17
 
19
18
  from __future__ import annotations
20
19
 
21
- from pathlib import Path
20
+ from typing import Any
21
+ from typing import cast
22
22
 
23
23
  from ..types import JSONData
24
24
  from ..types import JSONList
25
- from . import stub
25
+ from ..types import StrPath
26
+ from ._imports import get_dependency
27
+ from ._io import coerce_path
28
+ from ._io import ensure_parent_dir
29
+ from ._io import normalize_records
26
30
 
27
31
  # SECTION: EXPORTS ========================================================== #
28
32
 
@@ -34,18 +38,83 @@ __all__ = [
34
38
  ]
35
39
 
36
40
 
41
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
42
+
43
+
44
+ def _decode_all(
45
+ bson_module: Any,
46
+ payload: bytes,
47
+ ) -> list[dict[str, Any]]:
48
+ """
49
+ Decode all BSON documents from raw payload bytes.
50
+
51
+ Parameters
52
+ ----------
53
+ bson_module : Any
54
+ The imported BSON module to use for decoding.
55
+ payload : bytes
56
+ Raw bytes read from the BSON file.
57
+
58
+ Returns
59
+ -------
60
+ list[dict[str, Any]]
61
+ List of decoded BSON documents as dictionaries.
62
+
63
+ Raises
64
+ ------
65
+ AttributeError
66
+ If the bson module lacks the required :meth:`decode_all()` method.
67
+ """
68
+ if hasattr(bson_module, 'decode_all'):
69
+ return bson_module.decode_all(payload)
70
+ if hasattr(bson_module, 'BSON'):
71
+ return bson_module.BSON.decode_all(payload)
72
+ raise AttributeError('bson module lacks decode_all()')
73
+
74
+
75
+ def _encode_doc(
76
+ bson_module: Any,
77
+ doc: dict[str, Any],
78
+ ) -> bytes:
79
+ """
80
+ Encode a single BSON document to bytes.
81
+
82
+ Parameters
83
+ ----------
84
+ bson_module : Any
85
+ The imported BSON module to use for encoding.
86
+ doc : dict[str, Any]
87
+ The BSON document to encode.
88
+
89
+ Returns
90
+ -------
91
+ bytes
92
+ The encoded BSON document as bytes.
93
+
94
+ Raises
95
+ ------
96
+ AttributeError
97
+ If the bson module lacks the required :meth:`encode()` method.
98
+ """
99
+ if hasattr(bson_module, 'encode'):
100
+ return bson_module.encode(doc)
101
+ if hasattr(bson_module, 'BSON'):
102
+ return bson_module.BSON.encode(doc)
103
+ raise AttributeError('bson module lacks encode()')
104
+
105
+
37
106
  # SECTION: FUNCTIONS ======================================================== #
38
107
 
39
108
 
40
109
  def read(
41
- path: Path,
110
+ path: StrPath,
42
111
  ) -> JSONList:
43
112
  """
44
113
  Read BSON content from *path*.
45
114
 
46
115
  Parameters
47
116
  ----------
48
- path : Path
117
+ path : StrPath
49
118
  Path to the BSON file on disk.
50
119
 
51
120
  Returns
@@ -53,11 +122,16 @@ def read(
53
122
  JSONList
54
123
  The list of dictionaries read from the BSON file.
55
124
  """
56
- return stub.read(path, format_name='BSON')
125
+ path = coerce_path(path)
126
+ bson = get_dependency('bson', format_name='BSON', pip_name='pymongo')
127
+ with path.open('rb') as handle:
128
+ payload = handle.read()
129
+ docs = _decode_all(bson, payload)
130
+ return cast(JSONList, docs)
57
131
 
58
132
 
59
133
  def write(
60
- path: Path,
134
+ path: StrPath,
61
135
  data: JSONData,
62
136
  ) -> int:
63
137
  """
@@ -65,7 +139,7 @@ def write(
65
139
 
66
140
  Parameters
67
141
  ----------
68
- path : Path
142
+ path : StrPath
69
143
  Path to the BSON file on disk.
70
144
  data : JSONData
71
145
  Data to write as BSON. Should be a list of dictionaries or a
@@ -76,4 +150,14 @@ def write(
76
150
  int
77
151
  The number of rows written to the BSON file.
78
152
  """
79
- return stub.write(path, data, format_name='BSON')
153
+ path = coerce_path(path)
154
+ bson = get_dependency('bson', format_name='BSON', pip_name='pymongo')
155
+ records = normalize_records(data, 'BSON')
156
+ if not records:
157
+ return 0
158
+
159
+ ensure_parent_dir(path)
160
+ with path.open('wb') as handle:
161
+ for record in records:
162
+ handle.write(_encode_doc(bson, record))
163
+ return len(records)
etlplus/file/cbor.py CHANGED
@@ -1,8 +1,7 @@
1
1
  """
2
2
  :mod:`etlplus.file.cbor` module.
3
3
 
4
- Stub helpers for reading/writing Concise Binary Object Representation (CBOR)
5
- files (not implemented yet).
4
+ Helpers for reading/writing Concise Binary Object Representation (CBOR) files.
6
5
 
7
6
  Notes
8
7
  -----
@@ -19,11 +18,13 @@ Notes
19
18
 
20
19
  from __future__ import annotations
21
20
 
22
- from pathlib import Path
23
-
24
21
  from ..types import JSONData
25
- from ..types import JSONList
26
- from . import stub
22
+ from ..types import StrPath
23
+ from ._imports import get_dependency
24
+ from ._io import coerce_path
25
+ from ._io import coerce_record_payload
26
+ from ._io import ensure_parent_dir
27
+ from ._io import normalize_records
27
28
 
28
29
  # SECTION: EXPORTS ========================================================== #
29
30
 
@@ -39,37 +40,41 @@ __all__ = [
39
40
 
40
41
 
41
42
  def read(
42
- path: Path,
43
- ) -> JSONList:
43
+ path: StrPath,
44
+ ) -> JSONData:
44
45
  """
45
46
  Read CBOR content from *path*.
46
47
 
47
48
  Parameters
48
49
  ----------
49
- path : Path
50
+ path : StrPath
50
51
  Path to the CBOR file on disk.
51
52
 
52
53
  Returns
53
54
  -------
54
- JSONList
55
- The list of dictionaries read from the CBOR file.
55
+ JSONData
56
+ The structured data read from the CBOR file.
56
57
  """
57
- return stub.read(path, format_name='CBOR')
58
+ path = coerce_path(path)
59
+ cbor2 = get_dependency('cbor2', format_name='CBOR')
60
+ with path.open('rb') as handle:
61
+ payload = cbor2.loads(handle.read())
62
+ return coerce_record_payload(payload, format_name='CBOR')
58
63
 
59
64
 
60
65
  def write(
61
- path: Path,
66
+ path: StrPath,
62
67
  data: JSONData,
63
68
  ) -> int:
64
69
  """
65
- Write *data* to CBOR at *path* and return record count.
70
+ Write *data* to CBOR file at *path* and return record count.
66
71
 
67
72
  Parameters
68
73
  ----------
69
- path : Path
74
+ path : StrPath
70
75
  Path to the CBOR file on disk.
71
76
  data : JSONData
72
- Data to write as CBOR. Should be a list of dictionaries or a
77
+ Data to write as CBOR file. Should be a list of dictionaries or a
73
78
  single dictionary.
74
79
 
75
80
  Returns
@@ -77,4 +82,11 @@ def write(
77
82
  int
78
83
  The number of rows written to the CBOR file.
79
84
  """
80
- return stub.write(path, data, format_name='CBOR')
85
+ path = coerce_path(path)
86
+ cbor2 = get_dependency('cbor2', format_name='CBOR')
87
+ records = normalize_records(data, 'CBOR')
88
+ payload: JSONData = records if isinstance(data, list) else records[0]
89
+ ensure_parent_dir(path)
90
+ with path.open('wb') as handle:
91
+ handle.write(cbor2.dumps(payload))
92
+ return len(records)
etlplus/file/cfg.py CHANGED
@@ -19,11 +19,11 @@ Notes
19
19
 
20
20
  from __future__ import annotations
21
21
 
22
- from pathlib import Path
23
-
24
22
  from ..types import JSONData
25
23
  from ..types import JSONList
24
+ from ..types import StrPath
26
25
  from . import stub
26
+ from ._io import coerce_path
27
27
 
28
28
  # SECTION: EXPORTS ========================================================== #
29
29
 
@@ -39,14 +39,14 @@ __all__ = [
39
39
 
40
40
 
41
41
  def read(
42
- path: Path,
42
+ path: StrPath,
43
43
  ) -> JSONList:
44
44
  """
45
45
  Read CFG content from *path*.
46
46
 
47
47
  Parameters
48
48
  ----------
49
- path : Path
49
+ path : StrPath
50
50
  Path to the CFG file on disk.
51
51
 
52
52
  Returns
@@ -58,7 +58,7 @@ def read(
58
58
 
59
59
 
60
60
  def write(
61
- path: Path,
61
+ path: StrPath,
62
62
  data: JSONData,
63
63
  ) -> int:
64
64
  """
@@ -66,7 +66,7 @@ def write(
66
66
 
67
67
  Parameters
68
68
  ----------
69
- path : Path
69
+ path : StrPath
70
70
  Path to the CFG file on disk.
71
71
  data : JSONData
72
72
  Data to write as CFG file. Should be a list of dictionaries or a
@@ -77,4 +77,5 @@ def write(
77
77
  int
78
78
  The number of rows written to the CFG file.
79
79
  """
80
+ path = coerce_path(path)
80
81
  return stub.write(path, data, format_name='CFG')
etlplus/file/conf.py CHANGED
@@ -20,11 +20,11 @@ Notes
20
20
 
21
21
  from __future__ import annotations
22
22
 
23
- from pathlib import Path
24
-
25
23
  from ..types import JSONData
26
24
  from ..types import JSONList
25
+ from ..types import StrPath
27
26
  from . import stub
27
+ from ._io import coerce_path
28
28
 
29
29
  # SECTION: EXPORTS ========================================================== #
30
30
 
@@ -40,14 +40,14 @@ __all__ = [
40
40
 
41
41
 
42
42
  def read(
43
- path: Path,
43
+ path: StrPath,
44
44
  ) -> JSONList:
45
45
  """
46
46
  Read CONF content from *path*.
47
47
 
48
48
  Parameters
49
49
  ----------
50
- path : Path
50
+ path : StrPath
51
51
  Path to the CONF file on disk.
52
52
 
53
53
  Returns
@@ -59,7 +59,7 @@ def read(
59
59
 
60
60
 
61
61
  def write(
62
- path: Path,
62
+ path: StrPath,
63
63
  data: JSONData,
64
64
  ) -> int:
65
65
  """
@@ -67,7 +67,7 @@ def write(
67
67
 
68
68
  Parameters
69
69
  ----------
70
- path : Path
70
+ path : StrPath
71
71
  Path to the CONF file on disk.
72
72
  data : JSONData
73
73
  Data to write as CONF. Should be a list of dictionaries or a
@@ -78,4 +78,5 @@ def write(
78
78
  int
79
79
  The number of rows written to the CONF file.
80
80
  """
81
+ path = coerce_path(path)
81
82
  return stub.write(path, data, format_name='CONF')
etlplus/file/core.py CHANGED
@@ -265,7 +265,7 @@ class File:
265
265
 
266
266
  def read(self) -> JSONData:
267
267
  """
268
- Read structured data from :attr:path` using :attr:`file_format`.
268
+ Read structured data from :attr:`path` using :attr:`file_format`.
269
269
 
270
270
  Returns
271
271
  -------
etlplus/file/csv.py CHANGED
@@ -18,10 +18,10 @@ Notes
18
18
 
19
19
  from __future__ import annotations
20
20
 
21
- from pathlib import Path
22
-
23
21
  from ..types import JSONData
24
22
  from ..types import JSONList
23
+ from ..types import StrPath
24
+ from ._io import coerce_path
25
25
  from ._io import read_delimited
26
26
  from ._io import write_delimited
27
27
 
@@ -39,14 +39,14 @@ __all__ = [
39
39
 
40
40
 
41
41
  def read(
42
- path: Path,
42
+ path: StrPath,
43
43
  ) -> JSONList:
44
44
  """
45
45
  Read CSV content from *path*.
46
46
 
47
47
  Parameters
48
48
  ----------
49
- path : Path
49
+ path : StrPath
50
50
  Path to the CSV file on disk.
51
51
 
52
52
  Returns
@@ -58,7 +58,7 @@ def read(
58
58
 
59
59
 
60
60
  def write(
61
- path: Path,
61
+ path: StrPath,
62
62
  data: JSONData,
63
63
  ) -> int:
64
64
  """
@@ -66,7 +66,7 @@ def write(
66
66
 
67
67
  Parameters
68
68
  ----------
69
- path : Path
69
+ path : StrPath
70
70
  Path to the CSV file on disk.
71
71
  data : JSONData
72
72
  Data to write as CSV. Should be a list of dictionaries or a
@@ -77,4 +77,5 @@ def write(
77
77
  int
78
78
  The number of rows written to the CSV file.
79
79
  """
80
- return write_delimited(path, data, delimiter=',')
80
+ path = coerce_path(path)
81
+ return write_delimited(path, data, delimiter=',', format_name='CSV')