etlplus 0.12.10__py3-none-any.whl → 0.14.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (57)
  1. etlplus/README.md +1 -1
  2. etlplus/__init__.py +1 -26
  3. etlplus/api/__init__.py +10 -0
  4. etlplus/api/config.py +36 -20
  5. etlplus/api/endpoint_client.py +3 -3
  6. etlplus/api/enums.py +51 -0
  7. etlplus/api/pagination/client.py +1 -1
  8. etlplus/api/rate_limiting/config.py +13 -1
  9. etlplus/api/rate_limiting/rate_limiter.py +8 -11
  10. etlplus/api/request_manager.py +11 -6
  11. etlplus/api/transport.py +14 -2
  12. etlplus/api/types.py +7 -6
  13. etlplus/{run_helpers.py → api/utils.py} +205 -153
  14. etlplus/cli/handlers.py +17 -7
  15. etlplus/config/jobs.py +14 -4
  16. etlplus/dag.py +103 -0
  17. etlplus/enums.py +0 -32
  18. etlplus/file/cfg.py +2 -2
  19. etlplus/file/conf.py +2 -2
  20. etlplus/file/dta.py +77 -0
  21. etlplus/file/enums.py +10 -4
  22. etlplus/file/hbs.py +78 -0
  23. etlplus/file/hdf5.py +78 -0
  24. etlplus/file/jinja2.py +78 -0
  25. etlplus/file/mat.py +78 -0
  26. etlplus/file/mustache.py +78 -0
  27. etlplus/file/nc.py +78 -0
  28. etlplus/file/numbers.py +75 -0
  29. etlplus/file/ods.py +79 -0
  30. etlplus/file/properties.py +13 -13
  31. etlplus/file/rda.py +78 -0
  32. etlplus/file/rds.py +78 -0
  33. etlplus/file/sas7bdat.py +78 -0
  34. etlplus/file/sav.py +77 -0
  35. etlplus/file/sylk.py +77 -0
  36. etlplus/file/toml.py +1 -1
  37. etlplus/file/vm.py +78 -0
  38. etlplus/file/wks.py +77 -0
  39. etlplus/file/xlsm.py +79 -0
  40. etlplus/file/xpt.py +78 -0
  41. etlplus/file/zsav.py +77 -0
  42. etlplus/{validation → ops}/README.md +2 -2
  43. etlplus/ops/__init__.py +61 -0
  44. etlplus/{extract.py → ops/extract.py} +78 -94
  45. etlplus/{load.py → ops/load.py} +73 -93
  46. etlplus/{run.py → ops/run.py} +140 -110
  47. etlplus/{transform.py → ops/transform.py} +75 -68
  48. etlplus/{validation → ops}/utils.py +80 -15
  49. etlplus/{validate.py → ops/validate.py} +19 -9
  50. etlplus/types.py +2 -2
  51. {etlplus-0.12.10.dist-info → etlplus-0.14.3.dist-info}/METADATA +91 -60
  52. {etlplus-0.12.10.dist-info → etlplus-0.14.3.dist-info}/RECORD +56 -35
  53. etlplus/validation/__init__.py +0 -44
  54. {etlplus-0.12.10.dist-info → etlplus-0.14.3.dist-info}/WHEEL +0 -0
  55. {etlplus-0.12.10.dist-info → etlplus-0.14.3.dist-info}/entry_points.txt +0 -0
  56. {etlplus-0.12.10.dist-info → etlplus-0.14.3.dist-info}/licenses/LICENSE +0 -0
  57. {etlplus-0.12.10.dist-info → etlplus-0.14.3.dist-info}/top_level.txt +0 -0
etlplus/dag.py ADDED
@@ -0,0 +1,103 @@
+ """
+ :mod:`etlplus.dag` module.
+
+ Lightweight directed acyclic graph (DAG) helpers for ordering jobs based on
+ ``depends_on``.
+ """
+
+ from __future__ import annotations
+
+ from collections import deque
+ from dataclasses import dataclass
+
+ from .config.jobs import JobConfig
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'DagError',
+     'topological_sort_jobs',
+ ]
+
+
+ # SECTION: ERRORS =========================================================== #
+
+
+ @dataclass(slots=True)
+ class DagError(ValueError):
+     """
+     Raised when the job dependency graph is invalid.
+
+     Attributes
+     ----------
+     message : str
+         Error message.
+     """
+
+     # -- Attributes -- #
+
+     message: str
+
+     # -- Magic Methods (Object Representation) -- #
+
+     def __str__(self) -> str:
+         return self.message
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def topological_sort_jobs(
+     jobs: list[JobConfig],
+ ) -> list[JobConfig]:
+     """
+     Return jobs in topological order based on ``depends_on``.
+
+     Parameters
+     ----------
+     jobs : list[JobConfig]
+         List of job configurations to sort.
+
+     Returns
+     -------
+     list[JobConfig]
+         Jobs sorted in topological order.
+
+     Raises
+     ------
+     DagError
+         If a dependency is missing, self-referential, or when a cycle is
+         detected.
+     """
+     index = {job.name: job for job in jobs}
+     edges: dict[str, set[str]] = {name: set() for name in index}
+     indegree: dict[str, int] = {name: 0 for name in index}
+
+     for job in jobs:
+         for dep in job.depends_on:
+             if dep not in index:
+                 raise DagError(
+                     f'Unknown dependency "{dep}" in job "{job.name}"',
+                 )
+             if dep == job.name:
+                 raise DagError(f'Job "{job.name}" depends on itself')
+             if job.name not in edges[dep]:
+                 edges[dep].add(job.name)
+                 indegree[job.name] += 1
+
+     queue = deque(sorted(name for name, deg in indegree.items() if deg == 0))
+     ordered: list[str] = []
+
+     while queue:
+         name = queue.popleft()
+         ordered.append(name)
+         for child in sorted(edges[name]):
+             indegree[child] -= 1
+             if indegree[child] == 0:
+                 queue.append(child)
+
+     if len(ordered) != len(jobs):
+         raise DagError('Dependency cycle detected')
+
+     return [index[name] for name in ordered]
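A minimal usage sketch of the new `topological_sort_jobs` helper. `JobConfig` construction is not part of this diff, so the example uses a hypothetical stand-in exposing only the `name` and `depends_on` attributes the sorter reads:

```python
from dataclasses import dataclass, field

from etlplus.dag import DagError, topological_sort_jobs


@dataclass
class FakeJob:
    """Hypothetical stand-in for JobConfig; only name/depends_on are used."""
    name: str
    depends_on: list[str] = field(default_factory=list)


jobs = [
    FakeJob('load', depends_on=['transform']),
    FakeJob('extract'),
    FakeJob('transform', depends_on=['extract']),
]

ordered = topological_sort_jobs(jobs)  # type: ignore[arg-type]
print([job.name for job in ordered])   # ['extract', 'transform', 'load']

try:
    topological_sort_jobs([FakeJob('a', depends_on=['a'])])  # self-dependency
except DagError as exc:
    print(exc)  # Job "a" depends on itself
```

Ties are broken alphabetically because ready nodes and children are pulled from sorted collections, so the output order is deterministic for a given job list.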
etlplus/enums.py CHANGED
@@ -23,7 +23,6 @@ __all__ = [
  'AggregateName',
  'CoercibleStrEnum',
  'DataConnectorType',
- 'HttpMethod',
  'OperatorName',
  'PipelineStep',
  ]
@@ -200,37 +199,6 @@ class DataConnectorType(CoercibleStrEnum):
      }
 
 
- class HttpMethod(CoercibleStrEnum):
-     """Supported HTTP verbs that accept JSON payloads."""
-
-     # -- Constants -- #
-
-     CONNECT = 'connect'
-     DELETE = 'delete'
-     GET = 'get'
-     HEAD = 'head'
-     OPTIONS = 'options'
-     PATCH = 'patch'
-     POST = 'post'
-     PUT = 'put'
-     TRACE = 'trace'
-
-     # -- Getters -- #
-
-     @property
-     def allows_body(self) -> bool:
-         """
-         Whether the method typically allows a request body.
-
-         Notes
-         -----
-         - RFCs do not strictly forbid bodies on some other methods (e.g.,
-           ``DELETE``), but many servers/clients do not expect them. We mark
-           ``POST``, ``PUT``, and ``PATCH`` as True.
-         """
-         return self in {HttpMethod.POST, HttpMethod.PUT, HttpMethod.PATCH}
-
-
  class OperatorName(CoercibleStrEnum):
      """Supported comparison operators with helpers."""
 
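`HttpMethod` leaves `etlplus/enums.py` here; per the file list above, `etlplus/api/enums.py` is added in this release, which is presumably its new home. A small sketch of the `allows_body` check the enum carried, assuming the members stay as shown (the new import path is an assumption, not confirmed by this diff):

```python
# Hypothetical import path: the enum is removed here and etlplus/api/enums.py
# is new in this release, so this assumes it moved there unchanged.
from etlplus.api.enums import HttpMethod

method = HttpMethod('post')   # members are lowercase string values
if method.allows_body:        # True only for POST, PUT, and PATCH
    body = {'id': 1}
else:
    body = None
```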
etlplus/file/cfg.py CHANGED
@@ -5,8 +5,8 @@ Helpers for reading/writing config (CFG) files.
 
  Notes
  -----
- - A CFG-formatted” file is a configuration file that may use various
-   syntaxes, such as INI, YAML, or custom formats.
+ - A CFG file is a configuration file that may use various syntaxes, such as
+   INI, YAML, or custom formats.
  - Common cases:
    - INI-style key-value pairs with sections (such as in Python ecosystems,
      using ``configparser``).
etlplus/file/conf.py CHANGED
@@ -5,8 +5,8 @@ Helpers for reading/writing config (CONF) files.
 
  Notes
  -----
- - A CONF-formatted” file is a configuration file that may use various
-   syntaxes, such as INI, YAML, or custom formats.
+ - A CONF file is a configuration file that may use various syntaxes, such as
+   INI, YAML, or custom formats.
  - Common cases:
    - INI-style key-value pairs with sections.
    - YAML-like structures with indentation.
etlplus/file/dta.py ADDED
@@ -0,0 +1,77 @@
+ """
+ :mod:`etlplus.file.dta` module.
+
+ Helpers for reading/writing Stata (DTA) data files.
+
+ Notes
+ -----
+ - Stata DTA files are binary files used by Stata statistical software that
+   store datasets with variables, labels, and data types.
+ - Common cases:
+   - Reading data for analysis in Python.
+   - Writing processed data back to Stata format.
+ - Rule of thumb:
+   - If you need to work with Stata data files, use this module for reading
+     and writing.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from ..types import JSONData
+ from ..types import JSONList
+ from . import stub
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read DTA content from ``path``.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the DTA file on disk.
+
+     Returns
+     -------
+     JSONList
+         The list of dictionaries read from the DTA file.
+     """
+     return stub.read(path, format_name='DTA')
+
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
+     """
+     Write ``data`` to DTA file at ``path`` and return record count.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the DTA file on disk.
+     data : JSONData
+         Data to write as DTA file. Should be a list of dictionaries or a
+         single dictionary.
+
+     Returns
+     -------
+     int
+         The number of rows written to the DTA file.
+     """
+     return stub.write(path, data, format_name='DTA')
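`etlplus/file/dta.py` (and the other new format modules below) follows the same pattern: a thin `read`/`write` pair that delegates to a shared `stub` helper. A usage sketch of the documented surface; the `stub` implementation itself is not shown in this diff, so it may simply raise until a real DTA backend lands:

```python
from pathlib import Path

from etlplus.file import dta

rows = [{'id': 1, 'score': 3.5}, {'id': 2, 'score': 4.0}]

# Per the docstrings: write() returns the number of rows written,
# read() returns a list of dictionaries.
written = dta.write(Path('sample.dta'), rows)
records = dta.read(Path('sample.dta'))
```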
etlplus/file/enums.py CHANGED
@@ -79,7 +79,7 @@ class FileFormat(CoercibleStrEnum):
      INI = 'ini'  # INI-style key-value pairs
      JSON = 'json'  # JavaScript Object Notation
      NDJSON = 'ndjson'  # Newline-Delimited JSON
-     PROPS = 'properties'  # Java-style key-value pairs
+     PROPERTIES = 'properties'  # Java-style key-value pairs
      TOML = 'toml'  # Tom's Obvious Minimal Language
      XML = 'xml'  # Extensible Markup Language
      YAML = 'yaml'  # YAML Ain't Markup Language
@@ -108,7 +108,7 @@ class FileFormat(CoercibleStrEnum):
 
      # Spreadsheets
      NUMBERS = 'numbers'  # Apple Numbers spreadsheet
-     ODS = 'ods'  # OpenDocument Spreadsheet
+     ODS = 'ods'  # OpenDocument spreadsheet
      WKS = 'wks'  # Lotus 1-2-3 spreadsheet
      XLS = 'xls'  # Microsoft Excel (BIFF); read-only
      XLSM = 'xlsm'  # Microsoft Excel Macro-Enabled (Open XML)
@@ -116,14 +116,14 @@ class FileFormat(CoercibleStrEnum):
 
      # Statistical / scientific / numeric computing
      DTA = 'dta'  # Stata data file
-     H5 = 'h5'  # Hierarchical Data Format
+     HDF5 = 'hdf5'  # Hierarchical Data Format
      MAT = 'mat'  # MATLAB data file
      NC = 'nc'  # NetCDF data file
      RDA = 'rda'  # RData workspace/object bundle
      RDS = 'rds'  # R data file
      SAS7BDAT = 'sas7bdat'  # SAS data file
      SAV = 'sav'  # SPSS data file
-     SYLK = 'sylk'  # Symbolic Link (SYmbolic LinK)
+     SYLK = 'sylk'  # Symbolic Link
      XPT = 'xpt'  # SAS Transport file
      ZSAV = 'zsav'  # Compressed SPSS data file
 
@@ -175,6 +175,12 @@ class FileFormat(CoercibleStrEnum):
 
      # Domain-specific & less common
 
+     # Templates
+     HBS = 'hbs'  # Handlebars
+     JINJA2 = 'jinja2'  # Jinja2
+     MUSTACHE = 'mustache'  # Mustache
+     VM = 'vm'  # Apache Velocity
+
      # -- Class Methods -- #
 
      @classmethod
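The `FileFormat` renames and additions in plain `Enum` value-lookup terms (the extra coercion helpers on `CoercibleStrEnum` are not shown in this diff):

```python
from etlplus.file.enums import FileFormat

# PROPS -> PROPERTIES keeps the 'properties' value, so value lookups still work.
assert FileFormat('properties') is FileFormat.PROPERTIES

# H5 -> HDF5 changes both the member name and its value ('h5' -> 'hdf5'),
# so FileFormat('h5') now raises ValueError.
assert FileFormat('hdf5') is FileFormat.HDF5

# New template formats added in this release.
assert FileFormat('hbs') is FileFormat.HBS
assert FileFormat('jinja2') is FileFormat.JINJA2
```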
etlplus/file/hbs.py ADDED
@@ -0,0 +1,78 @@
+ """
+ :mod:`etlplus.file.hbs` module.
+
+ Helpers for reading/writing Handlebars (HBS) template files.
+
+ Notes
+ -----
+ - A Handlebars (HBS) template file is a text file used for generating HTML or
+   other text formats by combining templates with data.
+ - Common cases:
+   - HTML templates.
+   - Email templates.
+   - Configuration files.
+ - Rule of thumb:
+   - If you need to work with Handlebars template files, use this module for
+     reading and writing.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from ..types import JSONData
+ from ..types import JSONList
+ from . import stub
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read HBS content from ``path``.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the HBS file on disk.
+
+     Returns
+     -------
+     JSONList
+         The list of dictionaries read from the HBS file.
+     """
+     return stub.read(path, format_name='HBS')
+
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
+     """
+     Write ``data`` to HBS file at ``path`` and return record count.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the HBS file on disk.
+     data : JSONData
+         Data to write as HBS file. Should be a list of dictionaries or a
+         single dictionary.
+
+     Returns
+     -------
+     int
+         The number of rows written to the HBS file.
+     """
+     return stub.write(path, data, format_name='HBS')
etlplus/file/hdf5.py ADDED
@@ -0,0 +1,78 @@
+ """
+ :mod:`etlplus.file.hdf5` module.
+
+ Helpers for reading/writing Hierarchical Data Format (HDF5) files.
+
+ Notes
+ -----
+ - An HDF5 file is a binary file format designed to store and organize large
+   amounts of data.
+ - Common cases:
+   - Scientific data storage and sharing.
+   - Large-scale data analysis.
+   - Hierarchical data organization.
+ - Rule of thumb:
+   - If the file follows the HDF5 specification, use this module for reading
+     and writing.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from ..types import JSONData
+ from ..types import JSONList
+ from . import stub
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read HDF5 content from ``path``.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the HDF5 file on disk.
+
+     Returns
+     -------
+     JSONList
+         The list of dictionaries read from the HDF5 file.
+     """
+     return stub.read(path, format_name='HDF5')
+
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
+     """
+     Write ``data`` to HDF5 file at ``path`` and return record count.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the HDF5 file on disk.
+     data : JSONData
+         Data to write as HDF5 file. Should be a list of dictionaries or a
+         single dictionary.
+
+     Returns
+     -------
+     int
+         The number of rows written to the HDF5 file.
+     """
+     return stub.write(path, data, format_name='HDF5')
etlplus/file/jinja2.py ADDED
@@ -0,0 +1,78 @@
+ """
+ :mod:`etlplus.file.jinja2` module.
+
+ Helpers for reading/writing Jinja2 (JINJA2) template files.
+
+ Notes
+ -----
+ - A JINJA2 file is a text file used for generating HTML or other text formats
+   by combining templates with data.
+ - Common cases:
+   - HTML templates.
+   - Email templates.
+   - Configuration files.
+ - Rule of thumb:
+   - If you need to work with Jinja2 template files, use this module for
+     reading and writing.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from ..types import JSONData
+ from ..types import JSONList
+ from . import stub
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read JINJA2 content from ``path``.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the JINJA2 file on disk.
+
+     Returns
+     -------
+     JSONList
+         The list of dictionaries read from the JINJA2 file.
+     """
+     return stub.read(path, format_name='JINJA2')
+
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
+     """
+     Write ``data`` to JINJA2 file at ``path`` and return record count.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the JINJA2 file on disk.
+     data : JSONData
+         Data to write as JINJA2 file. Should be a list of dictionaries or a
+         single dictionary.
+
+     Returns
+     -------
+     int
+         The number of rows written to the JINJA2 file.
+     """
+     return stub.write(path, data, format_name='JINJA2')
etlplus/file/mat.py ADDED
@@ -0,0 +1,78 @@
+ """
+ :mod:`etlplus.file.mat` module.
+
+ Helpers for reading/writing MATLAB (MAT) data files.
+
+ Notes
+ -----
+ - A MAT file is a binary file format used by MATLAB to store variables,
+   arrays, and other data structures.
+ - Common cases:
+   - Storing numerical arrays and matrices.
+   - Saving workspace variables.
+   - Sharing data between MATLAB and other programming environments.
+ - Rule of thumb:
+   - If the file follows the MAT-file specification, use this module for
+     reading and writing.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from ..types import JSONData
+ from ..types import JSONList
+ from . import stub
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read MAT content from ``path``.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the MAT file on disk.
+
+     Returns
+     -------
+     JSONList
+         The list of dictionaries read from the MAT file.
+     """
+     return stub.read(path, format_name='MAT')
+
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
+     """
+     Write ``data`` to MAT file at ``path`` and return record count.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the MAT file on disk.
+     data : JSONData
+         Data to write as MAT file. Should be a list of dictionaries or a
+         single dictionary.
+
+     Returns
+     -------
+     int
+         The number of rows written to the MAT file.
+     """
+     return stub.write(path, data, format_name='MAT')
etlplus/file/mustache.py ADDED
@@ -0,0 +1,78 @@
+ """
+ :mod:`etlplus.file.mustache` module.
+
+ Helpers for reading/writing Mustache (MUSTACHE) template files.
+
+ Notes
+ -----
+ - A MUSTACHE file is a text file used for generating HTML or other text
+   formats by combining templates with data.
+ - Common cases:
+   - HTML templates.
+   - Email templates.
+   - Configuration files.
+ - Rule of thumb:
+   - If you need to work with Mustache template files, use this module for
+     reading and writing.
+ """
+
+ from __future__ import annotations
+
+ from pathlib import Path
+
+ from ..types import JSONData
+ from ..types import JSONList
+ from . import stub
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     'read',
+     'write',
+ ]
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def read(
+     path: Path,
+ ) -> JSONList:
+     """
+     Read MUSTACHE content from ``path``.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the MUSTACHE file on disk.
+
+     Returns
+     -------
+     JSONList
+         The list of dictionaries read from the MUSTACHE file.
+     """
+     return stub.read(path, format_name='MUSTACHE')
+
+
+ def write(
+     path: Path,
+     data: JSONData,
+ ) -> int:
+     """
+     Write ``data`` to MUSTACHE file at ``path`` and return record count.
+
+     Parameters
+     ----------
+     path : Path
+         Path to the MUSTACHE file on disk.
+     data : JSONData
+         Data to write as MUSTACHE file. Should be a list of dictionaries or a
+         single dictionary.
+
+     Returns
+     -------
+     int
+         The number of rows written to the MUSTACHE file.
+     """
+     return stub.write(path, data, format_name='MUSTACHE')