etlplus 0.17.2__py3-none-any.whl → 0.17.3__py3-none-any.whl

This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions exactly as they appear in their public registries.
Files changed (68)
  1. etlplus/file/_imports.py +35 -20
  2. etlplus/file/_io.py +138 -15
  3. etlplus/file/_r.py +48 -0
  4. etlplus/file/_sql.py +224 -0
  5. etlplus/file/accdb.py +7 -6
  6. etlplus/file/arrow.py +13 -24
  7. etlplus/file/avro.py +13 -10
  8. etlplus/file/bson.py +61 -22
  9. etlplus/file/cbor.py +13 -25
  10. etlplus/file/cfg.py +7 -6
  11. etlplus/file/conf.py +7 -6
  12. etlplus/file/core.py +1 -1
  13. etlplus/file/csv.py +8 -7
  14. etlplus/file/dat.py +9 -6
  15. etlplus/file/dta.py +15 -30
  16. etlplus/file/duckdb.py +29 -122
  17. etlplus/file/feather.py +15 -30
  18. etlplus/file/fwf.py +16 -14
  19. etlplus/file/gz.py +12 -7
  20. etlplus/file/hbs.py +7 -6
  21. etlplus/file/hdf5.py +31 -6
  22. etlplus/file/ini.py +17 -24
  23. etlplus/file/ion.py +7 -6
  24. etlplus/file/jinja2.py +7 -6
  25. etlplus/file/json.py +10 -11
  26. etlplus/file/log.py +7 -6
  27. etlplus/file/mat.py +7 -6
  28. etlplus/file/mdb.py +7 -6
  29. etlplus/file/msgpack.py +13 -25
  30. etlplus/file/mustache.py +7 -6
  31. etlplus/file/nc.py +30 -21
  32. etlplus/file/ndjson.py +10 -6
  33. etlplus/file/numbers.py +7 -6
  34. etlplus/file/ods.py +10 -6
  35. etlplus/file/orc.py +15 -30
  36. etlplus/file/parquet.py +10 -6
  37. etlplus/file/pb.py +22 -23
  38. etlplus/file/pbf.py +7 -6
  39. etlplus/file/properties.py +15 -29
  40. etlplus/file/proto.py +14 -20
  41. etlplus/file/psv.py +8 -7
  42. etlplus/file/rda.py +19 -51
  43. etlplus/file/rds.py +19 -51
  44. etlplus/file/sas7bdat.py +10 -30
  45. etlplus/file/sav.py +13 -24
  46. etlplus/file/sqlite.py +25 -83
  47. etlplus/file/stub.py +8 -6
  48. etlplus/file/sylk.py +7 -6
  49. etlplus/file/tab.py +8 -7
  50. etlplus/file/toml.py +14 -17
  51. etlplus/file/tsv.py +8 -7
  52. etlplus/file/txt.py +10 -7
  53. etlplus/file/vm.py +7 -6
  54. etlplus/file/wks.py +7 -6
  55. etlplus/file/xls.py +8 -5
  56. etlplus/file/xlsm.py +10 -6
  57. etlplus/file/xlsx.py +10 -6
  58. etlplus/file/xml.py +11 -9
  59. etlplus/file/xpt.py +13 -33
  60. etlplus/file/yaml.py +10 -11
  61. etlplus/file/zip.py +10 -5
  62. etlplus/file/zsav.py +7 -6
  63. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/METADATA +1 -1
  64. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/RECORD +68 -66
  65. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/WHEEL +0 -0
  66. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/entry_points.txt +0 -0
  67. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/licenses/LICENSE +0 -0
  68. {etlplus-0.17.2.dist-info → etlplus-0.17.3.dist-info}/top_level.txt +0 -0
etlplus/file/sqlite.py CHANGED
@@ -18,14 +18,21 @@ Notes
 
 from __future__ import annotations
 
-import json
 import sqlite3
-from pathlib import Path
-from typing import Any
 
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
+from ._io import coerce_path
+from ._io import ensure_parent_dir
 from ._io import normalize_records
+from ._sql import DEFAULT_TABLE
+from ._sql import SQLITE_DIALECT
+from ._sql import coerce_sql_value
+from ._sql import collect_column_values
+from ._sql import infer_column_type
+from ._sql import quote_identifier
+from ._sql import resolve_table
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -37,81 +44,18 @@ __all__ = [
 ]
 
 
-# SECTION: INTERNAL CONSTANTS ============================================== #
-
-
-DEFAULT_TABLE = 'data'
-
-
-# SECTION: INTERNAL FUNCTIONS =============================================== #
-
-
-def _quote_identifier(value: str) -> str:
-    """Return a safely quoted SQL identifier."""
-    escaped = value.replace('"', '""')
-    return f'"{escaped}"'
-
-
-def _coerce_sql_value(value: Any) -> Any:
-    """Normalize values into SQLite-compatible types."""
-    if value is None or isinstance(value, (str, int, float, bool)):
-        return value
-    return json.dumps(value, ensure_ascii=True)
-
-
-def _infer_column_type(values: list[Any]) -> str:
-    """Infer a basic SQLite column type from sample values."""
-    seen_bool = False
-    seen_int = False
-    seen_float = False
-    seen_other = False
-    for value in values:
-        if value is None:
-            continue
-        if isinstance(value, bool):
-            seen_bool = True
-        elif isinstance(value, int):
-            seen_int = True
-        elif isinstance(value, float):
-            seen_float = True
-        else:
-            seen_other = True
-            break
-    if seen_other:
-        return 'TEXT'
-    if seen_float:
-        return 'REAL'
-    if seen_int or seen_bool:
-        return 'INTEGER'
-    return 'TEXT'
-
-
-def _resolve_table(tables: list[str]) -> str | None:
-    """Pick a table name for read operations."""
-    if not tables:
-        return None
-    if DEFAULT_TABLE in tables:
-        return DEFAULT_TABLE
-    if len(tables) == 1:
-        return tables[0]
-    raise ValueError(
-        'Multiple tables found in SQLite file; expected "data" or a '
-        'single table',
-    )
-
-
 # SECTION: FUNCTIONS ======================================================== #
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read SQLITE content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the SQLITE file on disk.
 
     Returns
@@ -119,6 +63,7 @@ def read(
     JSONList
         The list of dictionaries read from the SQLITE file.
     """
+    path = coerce_path(path)
     conn = sqlite3.connect(str(path))
     try:
         conn.row_factory = sqlite3.Row
@@ -128,10 +73,10 @@ def read(
             'ORDER BY name',
         )
         tables = [row[0] for row in cursor.fetchall()]
-        table = _resolve_table(tables)
+        table = resolve_table(tables, engine_name='SQLite')
         if table is None:
             return []
-        query = f'SELECT * FROM {_quote_identifier(table)}'
+        query = f'SELECT * FROM {quote_identifier(table)}'
        rows = conn.execute(query).fetchall()
         return [dict(row) for row in rows]
     finally:
@@ -139,7 +84,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -147,7 +92,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the SQLITE file on disk.
     data : JSONData
         Data to write as SQLITE. Should be a list of dictionaries or a
@@ -158,37 +103,34 @@ def write(
     int
         The number of rows written to the SQLITE file.
     """
+    path = coerce_path(path)
     records = normalize_records(data, 'SQLITE')
     if not records:
         return 0
 
-    columns = sorted({key for row in records for key in row})
+    columns, column_values = collect_column_values(records)
     if not columns:
         return 0
 
-    column_values: dict[str, list[Any]] = {col: [] for col in columns}
-    for row in records:
-        for column in columns:
-            column_values[column].append(row.get(column))
-
     column_defs = ', '.join(
-        f'{_quote_identifier(column)} {_infer_column_type(values)}'
+        f'{quote_identifier(column)} '
+        f'{infer_column_type(values, SQLITE_DIALECT)}'
         for column, values in column_values.items()
     )
-    table_ident = _quote_identifier(DEFAULT_TABLE)
-    insert_columns = ', '.join(_quote_identifier(column) for column in columns)
+    table_ident = quote_identifier(DEFAULT_TABLE)
+    insert_columns = ', '.join(quote_identifier(column) for column in columns)
     placeholders = ', '.join('?' for _ in columns)
     insert_sql = (
         f'INSERT INTO {table_ident} ({insert_columns}) VALUES ({placeholders})'
     )
 
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     conn = sqlite3.connect(str(path))
     try:
         conn.execute(f'DROP TABLE IF EXISTS {table_ident}')
         conn.execute(f'CREATE TABLE {table_ident} ({column_defs})')
         rows = [
-            tuple(_coerce_sql_value(row.get(column)) for column in columns)
+            tuple(coerce_sql_value(row.get(column)) for column in columns)
             for row in records
         ]
         conn.executemany(insert_sql, rows)
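
The helpers imported above come from the new etlplus/file/_sql.py module (+224 lines), whose body is not part of this section. The following is a plausible sketch reconstructed from the inline helpers deleted from sqlite.py; the exact signatures, the shape of SQLITE_DIALECT, and the engine_name keyword are assumptions inferred from the call sites.

# Hypothetical reconstruction of etlplus/file/_sql.py; the released module
# is not shown in this diff. Bodies mirror the helpers removed from
# sqlite.py above; the dialect table and keyword arguments are assumptions.
from __future__ import annotations

import json
from typing import Any

DEFAULT_TABLE = 'data'

# Assumed shape: a dialect maps inferred value kinds to engine column types.
SQLITE_DIALECT = {'integer': 'INTEGER', 'float': 'REAL', 'text': 'TEXT'}


def quote_identifier(value: str) -> str:
    """Return a safely quoted SQL identifier."""
    escaped = value.replace('"', '""')
    return f'"{escaped}"'


def coerce_sql_value(value: Any) -> Any:
    """Normalize values into SQL-compatible scalar types."""
    if value is None or isinstance(value, (str, int, float, bool)):
        return value
    return json.dumps(value, ensure_ascii=True)


def collect_column_values(
    records: list[dict[str, Any]],
) -> tuple[list[str], dict[str, list[Any]]]:
    """Return sorted column names and the values observed per column."""
    columns = sorted({key for row in records for key in row})
    column_values = {
        column: [row.get(column) for row in records] for column in columns
    }
    return columns, column_values


def infer_column_type(values: list[Any], dialect: dict[str, str]) -> str:
    """Infer a column type from sample values, mapped through *dialect*."""
    kind = 'text'  # default when every value is None
    for value in values:
        if value is None:
            continue
        if isinstance(value, int):  # covers bool, a subclass of int
            if kind != 'float':
                kind = 'integer'
        elif isinstance(value, float):
            kind = 'float'
        else:
            return dialect['text']  # any other type forces the text column
    return dialect[kind]


def resolve_table(tables: list[str], *, engine_name: str) -> str | None:
    """Pick the table to read: prefer DEFAULT_TABLE, else a sole table."""
    if not tables:
        return None
    if DEFAULT_TABLE in tables:
        return DEFAULT_TABLE
    if len(tables) == 1:
        return tables[0]
    raise ValueError(
        f'Multiple tables found in {engine_name} file; '
        f'expected "data" or a single table',
    )

Passing a dialect into infer_column_type, rather than hard-coding SQLite type names, is presumably what lets duckdb.py shed most of its own code in this release (+29 -122).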
etlplus/file/stub.py CHANGED
@@ -6,10 +6,10 @@ Helpers for reading/writing stubbed files.
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
+from ._io import coerce_path
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -25,7 +25,7 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
     format_name: str = 'Stubbed',
 ) -> JSONList:
     """
@@ -33,7 +33,7 @@ def read(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
        Path to the stubbed file on disk.
     format_name : str
         Human-readable format name.
@@ -48,12 +48,13 @@ def read(
     NotImplementedError
         Always, since this is a stub implementation.
     """
+    path = coerce_path(path)
     _ = path
     raise NotImplementedError(f'{format_name} read is not implemented yet')
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
     format_name: str = 'Stubbed',
 ) -> int:
@@ -62,7 +63,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the stubbed file on disk.
     data : JSONData
         Data to write as stubbed file. Should be a list of dictionaries or a
@@ -80,6 +81,7 @@ def write(
     NotImplementedError
         Always, since this is a stub implementation.
     """
+    path = coerce_path(path)
     _ = path
     _ = data
     raise NotImplementedError(f'{format_name} write is not implemented yet')
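
The same Path to StrPath migration repeats in every module below, always paired with a coerce_path call at the top of read and write. Neither helper's body appears in this section (_io.py changed by +138 -15), so the following is a minimal sketch inferred from the call sites; ensure_parent_dir is grounded in the path.parent.mkdir(parents=True, exist_ok=True) lines it replaces, while coerce_path and the StrPath alias are assumptions.

# Minimal sketch of the _io path helpers; the released _io.py is not shown
# in this diff, so these bodies are inferred from how the call sites use them.
from __future__ import annotations

import os
from pathlib import Path

# Assumed definition of etlplus.types.StrPath (imported above, not shown).
StrPath = str | os.PathLike[str]


def coerce_path(path: StrPath) -> Path:
    """Return *path* as a pathlib.Path, accepting plain strings too."""
    return Path(path)


def ensure_parent_dir(path: Path) -> None:
    """Create the parent directory of *path*, like mkdir -p."""
    path.parent.mkdir(parents=True, exist_ok=True)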
etlplus/file/sylk.py CHANGED
@@ -18,11 +18,11 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
 from . import stub
+from ._io import coerce_path
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -38,14 +38,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read SYLK content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the SYLK file on disk.
 
     Returns
@@ -57,7 +57,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -65,7 +65,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the SYLK file on disk.
     data : JSONData
         Data to write as SYLK file. Should be a list of dictionaries or a
@@ -76,4 +76,5 @@ def write(
     int
         The number of rows written to the SYLK file.
     """
+    path = coerce_path(path)
     return stub.write(path, data, format_name='SYLK')
etlplus/file/tab.py CHANGED
@@ -20,10 +20,10 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
+from ._io import coerce_path
 from ._io import read_delimited
 from ._io import write_delimited
 
@@ -41,14 +41,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read TAB content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TAB file on disk.
 
     Returns
@@ -60,7 +60,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -68,7 +68,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TAB file on disk.
     data : JSONData
         Data to write as TAB file. Should be a list of dictionaries or a
@@ -79,4 +79,5 @@ def write(
     int
         The number of rows written to the TAB file.
     """
-    return write_delimited(path, data, delimiter='\t')
+    path = coerce_path(path)
+    return write_delimited(path, data, delimiter='\t', format_name='TAB')
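
Both tab.py and tsv.py now forward a format_name to write_delimited, presumably so validation errors name the concrete format rather than a generic one. A sketch of the assumed post-change signature follows; the body is illustrative only, since the released _io.write_delimited is not shown here.

# Illustrative sketch of _io.write_delimited after the format_name change;
# the real implementation lives in _io.py (+138 -15), which this diff omits.
from __future__ import annotations

import csv
from pathlib import Path
from typing import Any


def write_delimited(
    path: Path,
    data: Any,
    *,
    delimiter: str,
    format_name: str = 'delimited',
) -> int:
    """Write dict rows to *path* as delimited text; return the row count."""
    rows = data if isinstance(data, list) else [data]
    if not all(isinstance(row, dict) for row in rows):
        # format_name lets this error say TAB or TSV instead of 'delimited'
        raise TypeError(f'{format_name} rows must be dictionaries')
    if not rows:
        return 0
    columns = sorted({key for row in rows for key in row})
    path.parent.mkdir(parents=True, exist_ok=True)
    with path.open('w', encoding='utf-8', newline='') as handle:
        writer = csv.DictWriter(
            handle,
            fieldnames=columns,
            delimiter=delimiter,
            restval='',  # fill columns missing from a given row
        )
        writer.writeheader()
        writer.writerows(rows)
    return len(rows)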
etlplus/file/toml.py CHANGED
@@ -19,13 +19,16 @@ Notes
 from __future__ import annotations
 
 import tomllib
-from pathlib import Path
 from typing import Any
 from typing import cast
 
 from ..types import JSONData
 from ..types import JSONDict
+from ..types import StrPath
 from ._imports import get_optional_module
+from ._io import coerce_path
+from ._io import ensure_parent_dir
+from ._io import require_dict_payload
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -41,14 +44,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONData:
     """
     Read TOML content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TOML file on disk.
 
     Returns
@@ -61,6 +64,7 @@ def read(
     TypeError
         If the TOML root is not a table (dictionary).
     """
+    path = coerce_path(path)
     payload = tomllib.loads(path.read_text(encoding='utf-8'))
     if isinstance(payload, dict):
         return payload
@@ -68,7 +72,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -76,7 +80,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TOML file on disk.
     data : JSONData
         Data to write as TOML. Should be a dictionary.
@@ -85,16 +89,9 @@ def write(
     -------
     int
         The number of records written to the TOML file.
-
-    Raises
-    ------
-    TypeError
-        If *data* is not a dictionary.
     """
-    if isinstance(data, list):
-        raise TypeError('TOML payloads must be a dict')
-    if not isinstance(data, dict):
-        raise TypeError('TOML payloads must be a dict')
+    path = coerce_path(path)
+    payload = require_dict_payload(data, format_name='TOML')
 
     toml_writer: Any
     try:
@@ -105,7 +102,7 @@ def write(
                 'Install with: pip install tomli-w'
             ),
         )
-        content = toml_writer.dumps(cast(JSONDict, data))
+        content = toml_writer.dumps(cast(JSONDict, payload))
     except ImportError:
         toml = get_optional_module(
             'toml',
@@ -115,8 +112,8 @@ def write(
                 'Install with: pip install tomli-w'
             ),
         )
-        content = toml.dumps(cast(JSONDict, data))
+        content = toml.dumps(cast(JSONDict, payload))
 
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     path.write_text(content, encoding='utf-8')
     return 1
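
toml.write previously rejected non-dict payloads with two back-to-back TypeError branches carrying the same message; that check now lives behind _io.require_dict_payload. Below is a sketch consistent with the removed code; the keyword-only signature and exact error message are assumptions based on the call site.

# Sketch of _io.require_dict_payload, inferred from the duplicate TypeError
# checks removed from toml.write above; the wording is an assumption.
from __future__ import annotations

from typing import Any


def require_dict_payload(data: Any, *, format_name: str) -> dict[str, Any]:
    """Return *data* unchanged if it is a dict; raise TypeError otherwise."""
    if not isinstance(data, dict):
        raise TypeError(f'{format_name} payloads must be a dict')
    return data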
etlplus/file/tsv.py CHANGED
@@ -19,10 +19,10 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
+from ._io import coerce_path
 from ._io import read_delimited
 from ._io import write_delimited
 
@@ -40,14 +40,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read TSV content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TSV file on disk.
 
     Returns
@@ -59,7 +59,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -67,7 +67,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TSV file on disk.
     data : JSONData
         Data to write as TSV. Should be a list of dictionaries or a
@@ -78,4 +78,5 @@ def write(
     int
         The number of rows written to the TSV file.
     """
-    return write_delimited(path, data, delimiter='\t')
+    path = coerce_path(path)
+    return write_delimited(path, data, delimiter='\t', format_name='TSV')
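
The practical effect of the StrPath change is that callers may pass plain strings where 0.17.2 required pathlib.Path objects. A hypothetical round trip against the tsv module above; parent-directory creation is assumed, mirroring the other writers in this diff.

# Hypothetical usage of the 0.17.3 API shown above; a str path now works
# because write() coerces it via coerce_path().
from etlplus.file import tsv

rows = [
    {'name': 'ada', 'score': 95},
    {'name': 'grace', 'score': 98},
]

written = tsv.write('out/scores.tsv', rows)  # parent dir creation assumed
print(written)  # 2

for row in tsv.read('out/scores.tsv'):
    print(row['name'], row['score'])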
etlplus/file/txt.py CHANGED
@@ -16,11 +16,12 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
 from ..utils import count_records
+from ._io import coerce_path
+from ._io import ensure_parent_dir
 from ._io import normalize_records
 
 # SECTION: EXPORTS ========================================================== #
@@ -37,14 +38,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read TXT content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TXT file on disk.
 
     Returns
@@ -52,6 +53,7 @@ def read(
     JSONList
         The list of dictionaries read from the TXT file.
     """
+    path = coerce_path(path)
     rows: JSONList = []
     with path.open('r', encoding='utf-8') as handle:
         for line in handle:
@@ -63,7 +65,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -71,7 +73,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the TXT file on disk.
     data : JSONData
         Data to write. Expects ``{'text': '...'}`` or a list of those.
@@ -87,12 +89,13 @@ def write(
         If any item in *data* is not a dictionary or if any dictionary
         does not contain a ``'text'`` key.
     """
+    path = coerce_path(path)
     rows = normalize_records(data, 'TXT')
 
     if not rows:
         return 0
 
-    path.parent.mkdir(parents=True, exist_ok=True)
+    ensure_parent_dir(path)
     with path.open('w', encoding='utf-8') as handle:
         for row in rows:
             if 'text' not in row:
etlplus/file/vm.py CHANGED
@@ -19,11 +19,11 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
 from . import stub
+from ._io import coerce_path
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -39,14 +39,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read VM content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the VM file on disk.
 
     Returns
@@ -58,7 +58,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -66,7 +66,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the VM file on disk.
     data : JSONData
         Data to write as VM file. Should be a list of dictionaries or a single
@@ -77,4 +77,5 @@ def write(
     int
         The number of rows written to the VM file.
     """
+    path = coerce_path(path)
     return stub.write(path, data, format_name='VM')
etlplus/file/wks.py CHANGED
@@ -18,11 +18,11 @@ Notes
 
 from __future__ import annotations
 
-from pathlib import Path
-
 from ..types import JSONData
 from ..types import JSONList
+from ..types import StrPath
 from . import stub
+from ._io import coerce_path
 
 # SECTION: EXPORTS ========================================================== #
 
@@ -38,14 +38,14 @@ __all__ = [
 
 
 def read(
-    path: Path,
+    path: StrPath,
 ) -> JSONList:
     """
     Read WKS content from *path*.
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the WKS file on disk.
 
     Returns
@@ -57,7 +57,7 @@ def read(
 
 
 def write(
-    path: Path,
+    path: StrPath,
     data: JSONData,
 ) -> int:
     """
@@ -65,7 +65,7 @@ def write(
 
     Parameters
     ----------
-    path : Path
+    path : StrPath
         Path to the WKS file on disk.
     data : JSONData
         Data to write as WKS file. Should be a list of dictionaries or a
@@ -76,4 +76,5 @@ def write(
     int
         The number of rows written to the WKS file.
     """
+    path = coerce_path(path)
     return stub.write(path, data, format_name='WKS')