etlplus 0.15.0__py3-none-any.whl → 0.16.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (118)
  1. etlplus/README.md +3 -3
  2. etlplus/api/README.md +31 -0
  3. etlplus/api/auth.py +1 -1
  4. etlplus/api/config.py +5 -10
  5. etlplus/api/endpoint_client.py +4 -4
  6. etlplus/api/pagination/config.py +1 -1
  7. etlplus/api/pagination/paginator.py +6 -7
  8. etlplus/api/rate_limiting/config.py +4 -4
  9. etlplus/api/rate_limiting/rate_limiter.py +1 -1
  10. etlplus/api/retry_manager.py +2 -2
  11. etlplus/api/transport.py +1 -1
  12. etlplus/api/types.py +99 -0
  13. etlplus/api/utils.py +1 -1
  14. etlplus/cli/commands.py +75 -42
  15. etlplus/cli/constants.py +1 -1
  16. etlplus/cli/handlers.py +31 -13
  17. etlplus/cli/io.py +2 -2
  18. etlplus/cli/main.py +2 -2
  19. etlplus/cli/state.py +4 -7
  20. etlplus/connector/__init__.py +43 -0
  21. etlplus/connector/api.py +161 -0
  22. etlplus/connector/connector.py +26 -0
  23. etlplus/connector/core.py +132 -0
  24. etlplus/connector/database.py +122 -0
  25. etlplus/connector/enums.py +52 -0
  26. etlplus/connector/file.py +120 -0
  27. etlplus/connector/types.py +40 -0
  28. etlplus/connector/utils.py +122 -0
  29. etlplus/database/ddl.py +2 -2
  30. etlplus/database/engine.py +19 -3
  31. etlplus/database/orm.py +2 -0
  32. etlplus/enums.py +1 -33
  33. etlplus/file/_imports.py +1 -0
  34. etlplus/file/_io.py +52 -4
  35. etlplus/file/accdb.py +3 -2
  36. etlplus/file/arrow.py +3 -2
  37. etlplus/file/avro.py +3 -2
  38. etlplus/file/bson.py +3 -2
  39. etlplus/file/cbor.py +3 -2
  40. etlplus/file/cfg.py +3 -2
  41. etlplus/file/conf.py +3 -2
  42. etlplus/file/core.py +11 -8
  43. etlplus/file/csv.py +3 -2
  44. etlplus/file/dat.py +3 -2
  45. etlplus/file/dta.py +3 -2
  46. etlplus/file/duckdb.py +3 -2
  47. etlplus/file/enums.py +1 -1
  48. etlplus/file/feather.py +3 -2
  49. etlplus/file/fwf.py +3 -2
  50. etlplus/file/gz.py +3 -2
  51. etlplus/file/hbs.py +3 -2
  52. etlplus/file/hdf5.py +3 -2
  53. etlplus/file/ini.py +3 -2
  54. etlplus/file/ion.py +3 -2
  55. etlplus/file/jinja2.py +3 -2
  56. etlplus/file/json.py +5 -16
  57. etlplus/file/log.py +3 -2
  58. etlplus/file/mat.py +3 -2
  59. etlplus/file/mdb.py +3 -2
  60. etlplus/file/msgpack.py +3 -2
  61. etlplus/file/mustache.py +3 -2
  62. etlplus/file/nc.py +3 -2
  63. etlplus/file/ndjson.py +3 -2
  64. etlplus/file/numbers.py +3 -2
  65. etlplus/file/ods.py +3 -2
  66. etlplus/file/orc.py +3 -2
  67. etlplus/file/parquet.py +3 -2
  68. etlplus/file/pb.py +3 -2
  69. etlplus/file/pbf.py +3 -2
  70. etlplus/file/properties.py +3 -2
  71. etlplus/file/proto.py +3 -2
  72. etlplus/file/psv.py +3 -2
  73. etlplus/file/rda.py +3 -2
  74. etlplus/file/rds.py +3 -2
  75. etlplus/file/sas7bdat.py +3 -2
  76. etlplus/file/sav.py +3 -2
  77. etlplus/file/sqlite.py +3 -2
  78. etlplus/file/stub.py +1 -0
  79. etlplus/file/sylk.py +3 -2
  80. etlplus/file/tab.py +3 -2
  81. etlplus/file/toml.py +3 -2
  82. etlplus/file/tsv.py +3 -2
  83. etlplus/file/txt.py +4 -3
  84. etlplus/file/vm.py +3 -2
  85. etlplus/file/wks.py +3 -2
  86. etlplus/file/xls.py +3 -2
  87. etlplus/file/xlsm.py +3 -2
  88. etlplus/file/xlsx.py +3 -2
  89. etlplus/file/xml.py +9 -3
  90. etlplus/file/xpt.py +3 -2
  91. etlplus/file/yaml.py +5 -16
  92. etlplus/file/zip.py +3 -2
  93. etlplus/file/zsav.py +3 -2
  94. etlplus/ops/extract.py +13 -1
  95. etlplus/ops/load.py +15 -2
  96. etlplus/ops/run.py +4 -4
  97. etlplus/ops/transform.py +2 -2
  98. etlplus/ops/utils.py +6 -35
  99. etlplus/ops/validate.py +3 -3
  100. etlplus/types.py +3 -2
  101. etlplus/utils.py +163 -29
  102. etlplus/workflow/__init__.py +0 -11
  103. etlplus/workflow/jobs.py +84 -27
  104. etlplus/workflow/pipeline.py +48 -48
  105. {etlplus-0.15.0.dist-info → etlplus-0.16.0.dist-info}/METADATA +4 -4
  106. etlplus-0.16.0.dist-info/RECORD +141 -0
  107. {etlplus-0.15.0.dist-info → etlplus-0.16.0.dist-info}/WHEEL +1 -1
  108. etlplus/config/README.md +0 -50
  109. etlplus/config/__init__.py +0 -33
  110. etlplus/config/types.py +0 -140
  111. etlplus/dag.py +0 -103
  112. etlplus/workflow/connector.py +0 -373
  113. etlplus/workflow/types.py +0 -115
  114. etlplus/workflow/utils.py +0 -120
  115. etlplus-0.15.0.dist-info/RECORD +0 -139
  116. {etlplus-0.15.0.dist-info → etlplus-0.16.0.dist-info}/entry_points.txt +0 -0
  117. {etlplus-0.15.0.dist-info → etlplus-0.16.0.dist-info}/licenses/LICENSE +0 -0
  118. {etlplus-0.15.0.dist-info → etlplus-0.16.0.dist-info}/top_level.txt +0 -0
etlplus/connector/types.py ADDED
@@ -0,0 +1,40 @@
+ """
+ :mod:`etlplus.connector.types` module.
+
+ Connector type aliases for :mod:`etlplus.connector`.
+
+ Examples
+ --------
+ >>> from etlplus.connector import Connector
+ >>> src: Connector = {
+ >>>     "type": "file",
+ >>>     "path": "/data/input.csv",
+ >>> }
+ >>> tgt: Connector = {
+ >>>     "type": "database",
+ >>>     "connection_string": "postgresql://user:pass@localhost/db",
+ >>> }
+ >>> from etlplus.api import RetryPolicy
+ >>> rp: RetryPolicy = {"max_attempts": 3, "backoff": 0.5}
+ """
+
+ from __future__ import annotations
+
+ from typing import Literal
+
+ from .enums import DataConnectorType
+
+ # SECTION: EXPORTS ========================================================= #
+
+
+ __all__ = [
+     # Type Aliases
+     'ConnectorType',
+ ]
+
+
+ # SECTION: TYPE ALIASES ===================================================== #
+
+
+ # Literal type for supported connector kinds (strings or enum members)
+ type ConnectorType = DataConnectorType | Literal['api', 'database', 'file']
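A hedged sketch of annotating with the new ConnectorType alias (variable names are illustrative; per the alias definition above, it accepts either plain literals or DataConnectorType members):

    from etlplus.connector.enums import DataConnectorType
    from etlplus.connector.types import ConnectorType

    kind_a: ConnectorType = 'file'                      # plain string literal
    kind_b: ConnectorType = DataConnectorType.DATABASE  # enum member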
etlplus/connector/utils.py ADDED
@@ -0,0 +1,122 @@
+ """
+ :mod:`etlplus.connector.utils` module.
+
+ Shared connector parsing helpers.
+ """
+
+ from __future__ import annotations
+
+ from collections.abc import Mapping
+ from typing import Any
+
+ from .api import ConnectorApi
+ from .connector import Connector
+ from .database import ConnectorDb
+ from .enums import DataConnectorType
+ from .file import ConnectorFile
+
+ # SECTION: EXPORTS ========================================================== #
+
+
+ __all__ = [
+     # Functions
+     'parse_connector',
+ ]
+
+
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
+
+
+ def _coerce_connector_type(
+     obj: Mapping[str, Any],
+ ) -> DataConnectorType:
+     """
+     Normalize and validate the connector ``type`` field.
+
+     Parameters
+     ----------
+     obj : Mapping[str, Any]
+         Mapping with a ``type`` entry.
+
+     Returns
+     -------
+     DataConnectorType
+         Normalized connector type enum.
+
+     Raises
+     ------
+     TypeError
+         If ``type`` is missing or unsupported.
+     """
+     if 'type' not in obj:
+         raise TypeError('Connector requires a "type"')
+     try:
+         return DataConnectorType.coerce(obj.get('type'))
+     except ValueError as exc:
+         allowed = ', '.join(DataConnectorType.choices())
+         raise TypeError(
+             f'Unsupported connector type: {obj.get("type")!r}. '
+             f'Expected one of {allowed}.',
+         ) from exc
+
+
+ def _load_connector(
+     kind: DataConnectorType,
+ ) -> type[Connector]:
+     """
+     Resolve the connector class for the requested kind.
+
+     Parameters
+     ----------
+     kind : DataConnectorType
+         Connector kind enum.
+
+     Returns
+     -------
+     type[Connector]
+         Connector class corresponding to *kind*.
+     """
+     match kind:
+         case DataConnectorType.API:
+             return ConnectorApi
+         case DataConnectorType.DATABASE:
+             return ConnectorDb
+         case DataConnectorType.FILE:
+             return ConnectorFile
+
+
+ # SECTION: FUNCTIONS ======================================================== #
+
+
+ def parse_connector(
+     obj: Mapping[str, Any],
+ ) -> Connector:
+     """
+     Dispatch to a concrete connector constructor based on ``type``.
+
+     Parameters
+     ----------
+     obj : Mapping[str, Any]
+         Mapping with at least ``name`` and ``type``.
+
+     Returns
+     -------
+     Connector
+         Concrete connector instance.
+
+     Raises
+     ------
+     TypeError
+         If the mapping is invalid or the connector type is unsupported.
+
+     Notes
+     -----
+     Delegates to the tolerant ``from_obj`` constructors for each connector
+     kind. Connector types are normalized via
+     :class:`etlplus.connector.enums.DataConnectorType`, so common aliases
+     (e.g., ``'db'`` or ``'http'``) are accepted.
+     """
+     if not isinstance(obj, Mapping):
+         raise TypeError('Connector configuration must be a mapping.')
+     connector_cls = _load_connector(_coerce_connector_type(obj))
+     return connector_cls.from_obj(obj)
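For orientation, a minimal usage sketch of the new parse_connector helper; the keys beyond "type" are taken from the etlplus.connector.types examples above and are otherwise assumptions:

    from etlplus.connector.utils import parse_connector

    src = parse_connector({'type': 'file', 'path': '/data/input.csv'})
    tgt = parse_connector({
        'type': 'db',  # alias normalized to 'database'
        'connection_string': 'postgresql://user:pass@localhost/db',
    })

    try:
        parse_connector({'path': '/data/input.csv'})  # missing "type"
    except TypeError as exc:
        print(exc)  # Connector requires a "type"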
etlplus/database/ddl.py CHANGED
@@ -233,7 +233,7 @@ def render_table_sql(
  template : TemplateKey | None, optional
  Template key to use (default: 'ddl').
  template_path : str | None, optional
- Path to a custom template file (overrides ``template``).
+ Path to a custom template file (overrides *template*).
 
  Returns
  -------
@@ -264,7 +264,7 @@ def render_tables(
  template : TemplateKey | None, optional
  Template key to use (default: 'ddl').
  template_path : str | None, optional
- Path to a custom template file (overrides ``template``).
+ Path to a custom template file (overrides *template*).
 
  Returns
  -------
etlplus/database/engine.py CHANGED
@@ -87,7 +87,7 @@ def load_database_url_from_config(
  Extract a database URL/DSN from a YAML/JSON config file.
 
  The loader is schema-tolerant: it looks for a top-level "databases" map
- and then for a named entry (``name``). Each entry may contain either a
+ and then for a named entry (*name*). Each entry may contain either a
  ``connection_string``/``url``/``dsn`` or a nested ``default`` block with
  those fields.
 
@@ -136,9 +136,25 @@ def load_database_url_from_config(
  return url
 
 
- def make_engine(url: str | None = None, **engine_kwargs: Any) -> Engine:
- """Create a SQLAlchemy Engine, defaulting to env config if no URL given."""
+ def make_engine(
+ url: str | None = None,
+ **engine_kwargs: Any,
+ ) -> Engine:
+ """
+ Create a SQLAlchemy Engine, defaulting to env config if no URL given.
+
+ Parameters
+ ----------
+ url : str | None, optional
+ Database URL/DSN string. When omitted, ``DATABASE_URL`` is used.
+ **engine_kwargs : Any
+ Extra keyword arguments forwarded to ``create_engine``.
 
+ Returns
+ -------
+ Engine
+ Configured SQLAlchemy engine instance.
+ """
  resolved_url = url or DATABASE_URL
  return create_engine(resolved_url, pool_pre_ping=True, **engine_kwargs)
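A minimal sketch of calling the expanded make_engine signature (the postgresql:// URL and the echo flag are illustrative; extra kwargs are simply forwarded to SQLAlchemy's create_engine):

    from etlplus.database.engine import make_engine

    # Explicit URL plus an extra kwarg forwarded to create_engine.
    engine = make_engine('postgresql://user:pass@localhost/db', echo=True)

    # Omit the URL to fall back to the DATABASE_URL default.
    default_engine = make_engine()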
 
etlplus/database/orm.py CHANGED
@@ -201,12 +201,14 @@ def build_models(
  ) -> ModelRegistry:
  """
  Build SQLAlchemy ORM models from table specifications.
+
  Parameters
  ----------
  specs : list[TableSpec]
  List of table specifications.
  base : type[DeclarativeBase], optional
  Base class for the ORM models (default: :class:`Base`).
+
  Returns
  -------
  ModelRegistry
etlplus/enums.py CHANGED
@@ -22,7 +22,6 @@ __all__ = [
  # Enums
  'AggregateName',
  'CoercibleStrEnum',
- 'DataConnectorType',
  'OperatorName',
  'PipelineStep',
  ]
@@ -76,7 +75,7 @@ class CoercibleStrEnum(enum.StrEnum):
  @classmethod
  def coerce(cls, value: Self | str | object) -> Self:
  """
- Convert an enum member or string-like input to a member of ``cls``.
+ Convert an enum member or string-like input to a member of *cls*.
 
  Parameters
  ----------
@@ -168,37 +167,6 @@ class AggregateName(CoercibleStrEnum):
  return lambda xs, n: (fmean(xs) if xs else 0.0)
 
 
- class DataConnectorType(CoercibleStrEnum):
- """Supported data connector types."""
-
- # -- Constants -- #
-
- API = 'api'
- DATABASE = 'database'
- FILE = 'file'
-
- # -- Class Methods -- #
-
- @classmethod
- def aliases(cls) -> StrStrMap:
- """
- Return a mapping of common aliases for each enum member.
-
- Returns
- -------
- StrStrMap
- A mapping of alias names to their corresponding enum member names.
- """
- return {
- 'http': 'api',
- 'https': 'api',
- 'rest': 'api',
- 'db': 'database',
- 'filesystem': 'file',
- 'fs': 'file',
- }
-
-
  class OperatorName(CoercibleStrEnum):
  """Supported comparison operators with helpers."""
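The removed DataConnectorType enum now lives in etlplus.connector.enums (file 25 above). A minimal sketch of the alias-aware coercion it provides, assuming the moved class keeps the alias table shown in the deleted block:

    from etlplus.connector.enums import DataConnectorType

    # Common aliases normalize to the canonical members via coerce().
    assert DataConnectorType.coerce('db') is DataConnectorType.DATABASE
    assert DataConnectorType.coerce('http') is DataConnectorType.API
    assert DataConnectorType.coerce('fs') is DataConnectorType.FILE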
 
etlplus/file/_imports.py CHANGED
@@ -12,6 +12,7 @@ from typing import Any
  # SECTION: INTERNAL CONSTANTS =============================================== #
 
 
+ # Optional Python module support (lazy-loaded to avoid hard dependency)
  _MODULE_CACHE: dict[str, Any] = {}
 
 
etlplus/file/_io.py CHANGED
@@ -8,6 +8,7 @@ from __future__ import annotations
 
  import csv
  from pathlib import Path
+ from typing import Any
  from typing import cast
 
  from ..types import JSONData
@@ -17,6 +18,44 @@ from ..types import JSONList
  # SECTION: FUNCTIONS ======================================================== #
 
 
+ def coerce_record_payload(
+ payload: Any,
+ *,
+ format_name: str,
+ ) -> JSONData:
+ """
+ Validate that *payload* is an object or list of objects.
+
+ Parameters
+ ----------
+ payload : Any
+ Parsed payload to validate.
+ format_name : str
+ Human-readable format name for error messages.
+
+ Returns
+ -------
+ JSONData
+ *payload* when it is a dict or a list of dicts.
+
+ Raises
+ ------
+ TypeError
+ If the payload is not a dict or list of dicts.
+ """
+ if isinstance(payload, dict):
+ return cast(JSONDict, payload)
+ if isinstance(payload, list):
+ if all(isinstance(item, dict) for item in payload):
+ return cast(JSONList, payload)
+ raise TypeError(
+ f'{format_name} array must contain only objects (dicts)',
+ )
+ raise TypeError(
+ f'{format_name} root must be an object or an array of objects',
+ )
+
+
  def normalize_records(
  data: JSONData,
  format_name: str,
@@ -50,9 +89,13 @@ def normalize_records(
  return [cast(JSONDict, data)]
 
 
- def read_delimited(path: Path, *, delimiter: str) -> JSONList:
+ def read_delimited(
+ path: Path,
+ *,
+ delimiter: str,
+ ) -> JSONList:
  """
- Read delimited content from ``path``.
+ Read delimited content from *path*.
 
  Parameters
  ----------
@@ -79,9 +122,14 @@ def read_delimited(path: Path, *, delimiter: str) -> JSONList:
  return rows
 
 
- def write_delimited(path: Path, data: JSONData, *, delimiter: str) -> int:
+ def write_delimited(
+ path: Path,
+ data: JSONData,
+ *,
+ delimiter: str,
+ ) -> int:
  """
- Write ``data`` to a delimited file and return record count.
+ Write *data* to a delimited file and return record count.
 
  Parameters
  ----------
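For orientation, a minimal sketch of how the new coerce_record_payload helper behaves (the format_name values are illustrative):

    from etlplus.file._io import coerce_record_payload

    coerce_record_payload({'id': 1}, format_name='JSON')               # dict passes through
    coerce_record_payload([{'id': 1}, {'id': 2}], format_name='YAML')  # list of dicts passes through

    try:
        coerce_record_payload([1, 2, 3], format_name='JSON')
    except TypeError as exc:
        print(exc)  # JSON array must contain only objects (dicts)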
etlplus/file/accdb.py CHANGED
@@ -28,6 +28,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -40,7 +41,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read ACCDB content from ``path``.
+ Read ACCDB content from *path*.
 
  Parameters
  ----------
@@ -60,7 +61,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to ACCDB at ``path`` and return record count.
+ Write *data* to ACCDB at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/arrow.py CHANGED
@@ -28,6 +28,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -40,7 +41,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read ARROW content from ``path``.
+ Read ARROW content from *path*.
 
  Parameters
  ----------
@@ -60,7 +61,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to ARROW at ``path`` and return record count.
+ Write *data* to ARROW at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/avro.py CHANGED
@@ -33,6 +33,7 @@ from ._io import normalize_records
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -124,7 +125,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read AVRO content from ``path``.
+ Read AVRO content from *path*.
 
  Parameters
  ----------
@@ -147,7 +148,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to AVRO at ``path`` and return record count.
+ Write *data* to AVRO at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/bson.py CHANGED
@@ -27,6 +27,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -39,7 +40,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read BSON content from ``path``.
+ Read BSON content from *path*.
 
  Parameters
  ----------
@@ -59,7 +60,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to BSON at ``path`` and return record count.
+ Write *data* to BSON at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/cbor.py CHANGED
@@ -28,6 +28,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -40,7 +41,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read CBOR content from ``path``.
+ Read CBOR content from *path*.
 
  Parameters
  ----------
@@ -60,7 +61,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to CBOR at ``path`` and return record count.
+ Write *data* to CBOR at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/cfg.py CHANGED
@@ -29,6 +29,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -41,7 +42,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read CFG content from ``path``.
+ Read CFG content from *path*.
 
  Parameters
  ----------
@@ -61,7 +62,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to CFG file at ``path`` and return record count.
+ Write *data* to CFG file at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/conf.py CHANGED
@@ -30,6 +30,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -42,7 +43,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read CONF content from ``path``.
+ Read CONF content from *path*.
 
  Parameters
  ----------
@@ -62,7 +63,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to CONF at ``path`` and return record count.
+ Write *data* to CONF at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/core.py CHANGED
@@ -22,7 +22,10 @@ from .enums import infer_file_format_and_compression
  # SECTION: EXPORTS ========================================================== #
 
 
- __all__ = ['File']
+ __all__ = [
+ # Classes
+ 'File',
+ ]
 
 
  # SECTION: INTERNAL FUNCTIONS =============================================== #
@@ -30,7 +33,7 @@ __all__ = ['File']
 
  def _accepts_root_tag(handler: object) -> bool:
  """
- Return True when ``handler`` supports a ``root_tag`` argument.
+ Return True when *handler* supports a ``root_tag`` argument.
 
  Parameters
  ----------
@@ -57,7 +60,7 @@ def _accepts_root_tag(handler: object) -> bool:
  @cache
  def _module_for_format(file_format: FileFormat) -> ModuleType:
  """
- Import and return the module for ``file_format``.
+ Import and return the module for *file_format*.
 
  Parameters
  ----------
@@ -112,8 +115,8 @@ class File:
  """
  Auto-detect and set the file format on initialization.
 
- If no explicit ``file_format`` is provided, attempt to infer it from
- the file path's extension and update :attr:`file_format`. If the
+ If no explicit :attr:`file_format` is provided, attempt to infer it
+ from the file path's extension and update :attr:`file_format`. If the
  extension is unknown, the attribute is left as ``None`` and will be
  validated later by :meth:`_ensure_format`.
  """
@@ -262,7 +265,7 @@ class File:
 
  def read(self) -> JSONData:
  """
- Read structured data from :attr:`path` using :attr:`file_format`.
+ Read structured data from :attr:path` using :attr:`file_format`.
 
  Returns
  -------
@@ -291,7 +294,7 @@ class File:
  root_tag: str = xml.DEFAULT_XML_ROOT,
  ) -> int:
  """
- Write ``data`` to :attr:`path` using :attr:`file_format`.
+ Write *data* to *path* using :attr:`file_format`.
 
  Parameters
  ----------
@@ -299,7 +302,7 @@ class File:
  Data to write to the file.
  root_tag : str, optional
  Root tag name to use when writing XML files. Defaults to
- ``'root'``.
+ ``xml.DEFAULT_XML_ROOT``.
 
  Returns
  -------
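A minimal sketch of the File facade these hunks touch, assuming File(path) construction and the extension-based format inference described in the docstrings above (the path and records are illustrative):

    from etlplus.file.core import File

    f = File('/tmp/example.csv')             # format inferred from '.csv'
    count = f.write([{'id': 1}, {'id': 2}])  # returns the record count
    rows = f.read()                          # reads the records back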
etlplus/file/csv.py CHANGED
@@ -29,6 +29,7 @@ from ._io import write_delimited
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -41,7 +42,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read CSV content from ``path``.
+ Read CSV content from *path*.
 
  Parameters
  ----------
@@ -61,7 +62,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to CSV at ``path`` and return record count.
+ Write *data* to CSV at *path* and return record count.
 
  Parameters
  ----------
etlplus/file/dat.py CHANGED
@@ -28,6 +28,7 @@ from . import stub
 
 
  __all__ = [
+ # Functions
  'read',
  'write',
  ]
@@ -40,7 +41,7 @@ def read(
  path: Path,
  ) -> JSONList:
  """
- Read DAT content from ``path``.
+ Read DAT content from *path*.
 
  Parameters
  ----------
@@ -60,7 +61,7 @@ def write(
  data: JSONData,
  ) -> int:
  """
- Write ``data`` to DAT file at ``path`` and return record count.
+ Write *data* to DAT file at *path* and return record count.
 
  Parameters
  ----------