etlplus 0.9.2__py3-none-any.whl → 0.10.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120) hide show
  1. etlplus/__init__.py +26 -1
  2. etlplus/api/README.md +3 -51
  3. etlplus/api/__init__.py +0 -10
  4. etlplus/api/config.py +28 -39
  5. etlplus/api/endpoint_client.py +3 -3
  6. etlplus/api/pagination/client.py +1 -1
  7. etlplus/api/rate_limiting/config.py +1 -13
  8. etlplus/api/rate_limiting/rate_limiter.py +11 -8
  9. etlplus/api/request_manager.py +6 -11
  10. etlplus/api/transport.py +2 -14
  11. etlplus/api/types.py +6 -96
  12. etlplus/cli/commands.py +43 -76
  13. etlplus/cli/constants.py +1 -1
  14. etlplus/cli/handlers.py +12 -40
  15. etlplus/cli/io.py +2 -2
  16. etlplus/cli/main.py +1 -1
  17. etlplus/cli/state.py +7 -4
  18. etlplus/{workflow → config}/__init__.py +23 -10
  19. etlplus/{workflow → config}/connector.py +44 -58
  20. etlplus/{workflow → config}/jobs.py +32 -105
  21. etlplus/{workflow → config}/pipeline.py +51 -59
  22. etlplus/{workflow → config}/profile.py +5 -8
  23. etlplus/config/types.py +204 -0
  24. etlplus/config/utils.py +120 -0
  25. etlplus/database/ddl.py +1 -1
  26. etlplus/database/engine.py +3 -19
  27. etlplus/database/orm.py +0 -2
  28. etlplus/database/schema.py +1 -1
  29. etlplus/enums.py +288 -0
  30. etlplus/{ops/extract.py → extract.py} +99 -81
  31. etlplus/file.py +652 -0
  32. etlplus/{ops/load.py → load.py} +101 -78
  33. etlplus/{ops/run.py → run.py} +127 -159
  34. etlplus/{api/utils.py → run_helpers.py} +153 -209
  35. etlplus/{ops/transform.py → transform.py} +68 -75
  36. etlplus/types.py +4 -5
  37. etlplus/utils.py +2 -136
  38. etlplus/{ops/validate.py → validate.py} +12 -22
  39. etlplus/validation/__init__.py +44 -0
  40. etlplus/{ops → validation}/utils.py +17 -53
  41. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/METADATA +17 -210
  42. etlplus-0.10.2.dist-info/RECORD +65 -0
  43. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/WHEEL +1 -1
  44. etlplus/README.md +0 -37
  45. etlplus/api/enums.py +0 -51
  46. etlplus/cli/README.md +0 -40
  47. etlplus/database/README.md +0 -48
  48. etlplus/file/README.md +0 -105
  49. etlplus/file/__init__.py +0 -25
  50. etlplus/file/_imports.py +0 -141
  51. etlplus/file/_io.py +0 -160
  52. etlplus/file/accdb.py +0 -78
  53. etlplus/file/arrow.py +0 -78
  54. etlplus/file/avro.py +0 -176
  55. etlplus/file/bson.py +0 -77
  56. etlplus/file/cbor.py +0 -78
  57. etlplus/file/cfg.py +0 -79
  58. etlplus/file/conf.py +0 -80
  59. etlplus/file/core.py +0 -322
  60. etlplus/file/csv.py +0 -79
  61. etlplus/file/dat.py +0 -78
  62. etlplus/file/dta.py +0 -77
  63. etlplus/file/duckdb.py +0 -78
  64. etlplus/file/enums.py +0 -343
  65. etlplus/file/feather.py +0 -111
  66. etlplus/file/fwf.py +0 -77
  67. etlplus/file/gz.py +0 -123
  68. etlplus/file/hbs.py +0 -78
  69. etlplus/file/hdf5.py +0 -78
  70. etlplus/file/ini.py +0 -79
  71. etlplus/file/ion.py +0 -78
  72. etlplus/file/jinja2.py +0 -78
  73. etlplus/file/json.py +0 -98
  74. etlplus/file/log.py +0 -78
  75. etlplus/file/mat.py +0 -78
  76. etlplus/file/mdb.py +0 -78
  77. etlplus/file/msgpack.py +0 -78
  78. etlplus/file/mustache.py +0 -78
  79. etlplus/file/nc.py +0 -78
  80. etlplus/file/ndjson.py +0 -108
  81. etlplus/file/numbers.py +0 -75
  82. etlplus/file/ods.py +0 -79
  83. etlplus/file/orc.py +0 -111
  84. etlplus/file/parquet.py +0 -113
  85. etlplus/file/pb.py +0 -78
  86. etlplus/file/pbf.py +0 -77
  87. etlplus/file/properties.py +0 -78
  88. etlplus/file/proto.py +0 -77
  89. etlplus/file/psv.py +0 -79
  90. etlplus/file/rda.py +0 -78
  91. etlplus/file/rds.py +0 -78
  92. etlplus/file/sas7bdat.py +0 -78
  93. etlplus/file/sav.py +0 -77
  94. etlplus/file/sqlite.py +0 -78
  95. etlplus/file/stub.py +0 -84
  96. etlplus/file/sylk.py +0 -77
  97. etlplus/file/tab.py +0 -81
  98. etlplus/file/toml.py +0 -78
  99. etlplus/file/tsv.py +0 -80
  100. etlplus/file/txt.py +0 -102
  101. etlplus/file/vm.py +0 -78
  102. etlplus/file/wks.py +0 -77
  103. etlplus/file/xls.py +0 -88
  104. etlplus/file/xlsm.py +0 -79
  105. etlplus/file/xlsx.py +0 -99
  106. etlplus/file/xml.py +0 -185
  107. etlplus/file/xpt.py +0 -78
  108. etlplus/file/yaml.py +0 -95
  109. etlplus/file/zip.py +0 -175
  110. etlplus/file/zsav.py +0 -77
  111. etlplus/ops/README.md +0 -50
  112. etlplus/ops/__init__.py +0 -61
  113. etlplus/templates/README.md +0 -46
  114. etlplus/workflow/README.md +0 -52
  115. etlplus/workflow/dag.py +0 -105
  116. etlplus/workflow/types.py +0 -115
  117. etlplus-0.9.2.dist-info/RECORD +0 -134
  118. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/entry_points.txt +0 -0
  119. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/licenses/LICENSE +0 -0
  120. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/top_level.txt +0 -0
etlplus/file/xml.py DELETED
@@ -1,185 +0,0 @@
1
- """
2
- :mod:`etlplus.file.xml` module.
3
-
4
- Helpers for reading/writing Extensible Markup Language (XML) files.
5
-
6
- Notes
7
- -----
8
- - An XML file is a markup language file that uses tags to define elements.
9
- - Common cases:
10
- - Configuration files.
11
- - Data interchange between systems.
12
- - Document formatting.
13
- - Rule of thumb:
14
- - If the file follows the XML specification, use this module for
15
- reading and writing.
16
- """
17
-
18
- from __future__ import annotations
19
-
20
- import xml.etree.ElementTree as ET
21
- from pathlib import Path
22
- from typing import Any
23
-
24
- from ..types import JSONData
25
- from ..types import JSONDict
26
- from ..utils import count_records
27
-
28
- # SECTION: EXPORTS ========================================================== #
29
-
30
-
31
- __all__ = [
32
- 'read',
33
- 'write',
34
- ]
35
-
36
-
37
- # SECTION: CONSTANTS ======================================================== #
38
-
39
-
40
- DEFAULT_XML_ROOT = 'root'
41
-
42
-
43
- # SECTION: INTERNAL FUNCTIONS =============================================== #
44
-
45
-
46
- def _dict_to_element(
47
- name: str,
48
- payload: Any,
49
- ) -> ET.Element:
50
- """
51
- Convert a dictionary-like payload into an XML element.
52
-
53
- Parameters
54
- ----------
55
- name : str
56
- Name of the XML element.
57
- payload : Any
58
- The data to include in the XML element.
59
-
60
- Returns
61
- -------
62
- ET.Element
63
- The constructed XML element.
64
- """
65
- element = ET.Element(name)
66
-
67
- if isinstance(payload, dict):
68
- text = payload.get('text')
69
- if text is not None:
70
- element.text = str(text)
71
-
72
- for key, value in payload.items():
73
- if key == 'text':
74
- continue
75
- if key.startswith('@'):
76
- element.set(key[1:], str(value))
77
- continue
78
- if isinstance(value, list):
79
- for item in value:
80
- element.append(_dict_to_element(key, item))
81
- else:
82
- element.append(_dict_to_element(key, value))
83
- elif isinstance(payload, list):
84
- for item in payload:
85
- element.append(_dict_to_element('item', item))
86
- elif payload is not None:
87
- element.text = str(payload)
88
-
89
- return element
90
-
91
-
92
- def _element_to_dict(
93
- element: ET.Element,
94
- ) -> JSONDict:
95
- """
96
- Convert an XML element into a nested dictionary.
97
-
98
- Parameters
99
- ----------
100
- element : ET.Element
101
- XML element to convert.
102
-
103
- Returns
104
- -------
105
- JSONDict
106
- Nested dictionary representation of the XML element.
107
- """
108
- result: JSONDict = {}
109
- text = (element.text or '').strip()
110
- if text:
111
- result['text'] = text
112
-
113
- for child in element:
114
- child_data = _element_to_dict(child)
115
- tag = child.tag
116
- if tag in result:
117
- existing = result[tag]
118
- if isinstance(existing, list):
119
- existing.append(child_data)
120
- else:
121
- result[tag] = [existing, child_data]
122
- else:
123
- result[tag] = child_data
124
-
125
- for key, value in element.attrib.items():
126
- if key in result:
127
- result[f'@{key}'] = value
128
- else:
129
- result[key] = value
130
- return result
131
-
132
-
133
- # SECTION: FUNCTIONS ======================================================== #
134
-
135
-
136
def read(
    path: Path,
) -> JSONDict:
    """
    Read XML content from ``path``.

    The whole document is converted to a single-key dictionary keyed by
    the root tag.

    Parameters
    ----------
    path : Path
        Path to the XML file on disk.

    Returns
    -------
    JSONDict
        Nested dictionary representation of the XML file.
    """
    document = ET.parse(path)
    top = document.getroot()
    return {top.tag: _element_to_dict(top)}
156
-
157
-
158
def write(path: Path, data: JSONData, *, root_tag: str) -> int:
    """
    Write ``data`` to XML at ``path`` and return record count.

    A single-key mapping supplies its own root tag; anything else is
    wrapped under ``root_tag``.

    Parameters
    ----------
    path : Path
        Path to the XML file on disk.
    data : JSONData
        Data to write as XML.
    root_tag : str
        Root tag name to use when writing XML files.

    Returns
    -------
    int
        The number of records written to the XML file.
    """
    if isinstance(data, dict) and len(data) == 1:
        ((name, payload),) = data.items()
        root = _dict_to_element(str(name), payload)
    else:
        root = _dict_to_element(root_tag, data)

    ET.ElementTree(root).write(path, encoding='utf-8', xml_declaration=True)
    return count_records(data)
etlplus/file/xpt.py DELETED
@@ -1,78 +0,0 @@
1
- """
2
- :mod:`etlplus.file.xpt` module.
3
-
4
- Helpers for reading/writing SAS Transport (XPT) files.
5
-
6
- Notes
7
- -----
8
- - A SAS Transport (XPT) file is a standardized file format used to transfer
9
- SAS datasets between different systems.
10
- - Common cases:
11
- - Sharing datasets between different SAS installations.
12
- - Archiving datasets in a platform-independent format.
13
- - Importing/exporting data to/from statistical software that supports XPT.
14
- - Rule of thumb:
15
- - If you need to work with XPT files, use this module for reading
16
- and writing.
17
- """
18
-
19
- from __future__ import annotations
20
-
21
- from pathlib import Path
22
-
23
- from ..types import JSONData
24
- from ..types import JSONList
25
- from . import stub
26
-
27
- # SECTION: EXPORTS ========================================================== #
28
-
29
-
30
- __all__ = [
31
- 'read',
32
- 'write',
33
- ]
34
-
35
-
36
- # SECTION: FUNCTIONS ======================================================== #
37
-
38
-
39
def read(
    path: Path,
) -> JSONList:
    """
    Read XPT content from ``path``.

    Delegates to the shared ``stub`` handler for the XPT format.

    Parameters
    ----------
    path : Path
        Path to the XPT file on disk.

    Returns
    -------
    JSONList
        The list of dictionaries read from the XPT file.
    """
    return stub.read(path, format_name='XPT')
56
-
57
-
58
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to XPT file at ``path`` and return record count.

    Delegates to the shared ``stub`` handler for the XPT format.

    Parameters
    ----------
    path : Path
        Path to the XPT file on disk.
    data : JSONData
        Data to write as XPT file. Should be a list of dictionaries or
        a single dictionary.

    Returns
    -------
    int
        The number of rows written to the XPT file.
    """
    return stub.write(path, data, format_name='XPT')
etlplus/file/yaml.py DELETED
@@ -1,95 +0,0 @@
1
- """
2
- :mod:`etlplus.file.yaml` module.
3
-
4
- Helpers for reading/writing YAML Ain't Markup Language (YAML) files.
5
-
6
- Notes
7
- -----
8
- - A YAML file is a human-readable data serialization format.
9
- - Common cases:
10
- - Configuration files.
11
- - Data exchange between languages with different data structures.
12
- - Complex data storage.
13
- - Rule of thumb:
14
- - If the file follows the YAML specification, use this module for
15
- reading and writing.
16
- """
17
-
18
- from __future__ import annotations
19
-
20
- from pathlib import Path
21
-
22
- from ..types import JSONData
23
- from ..utils import count_records
24
- from ._imports import get_yaml
25
- from ._io import coerce_record_payload
26
-
27
- # SECTION: EXPORTS ========================================================== #
28
-
29
-
30
- __all__ = [
31
- 'read',
32
- 'write',
33
- ]
34
-
35
-
36
- # SECTION: FUNCTIONS ======================================================== #
37
-
38
-
39
def read(
    path: Path,
) -> JSONData:
    """
    Read YAML content from ``path``.

    The loaded root is validated (a dict or a list of dicts) by
    ``coerce_record_payload`` before being returned.

    Parameters
    ----------
    path : Path
        Path to the YAML file on disk.

    Returns
    -------
    JSONData
        The structured data read from the YAML file.

    Raises
    ------
    TypeError
        If the YAML root is not an object or an array of objects.
    """
    yaml_module = get_yaml()
    with path.open('r', encoding='utf-8') as stream:
        parsed = yaml_module.safe_load(stream)
    return coerce_record_payload(parsed, format_name='YAML')
66
-
67
-
68
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` as YAML to ``path`` and return record count.

    Output keeps insertion order, preserves non-ASCII characters, and
    uses block (not flow) style.

    Parameters
    ----------
    path : Path
        Path to the YAML file on disk.
    data : JSONData
        Data to write as YAML.

    Returns
    -------
    int
        The number of records written.
    """
    with path.open('w', encoding='utf-8') as stream:
        get_yaml().safe_dump(
            data,
            stream,
            sort_keys=False,
            allow_unicode=True,
            default_flow_style=False,
        )
    return count_records(data)
etlplus/file/zip.py DELETED
@@ -1,175 +0,0 @@
1
- """
2
- :mod:`etlplus.file.zip` module.
3
-
4
- Helpers for reading/writing ZIP files.
5
- """
6
-
7
- from __future__ import annotations
8
-
9
- import tempfile
10
- import zipfile
11
- from pathlib import Path
12
-
13
- from ..types import JSONData
14
- from ..types import JSONDict
15
- from .enums import CompressionFormat
16
- from .enums import FileFormat
17
- from .enums import infer_file_format_and_compression
18
-
19
- # SECTION: EXPORTS ========================================================== #
20
-
21
-
22
- __all__ = [
23
- 'read',
24
- 'write',
25
- ]
26
-
27
-
28
- # SECTION: INTERNAL FUNCTIONS =============================================== #
29
-
30
-
31
def _resolve_format(
    filename: str,
) -> FileFormat:
    """
    Resolve the inner file format from a filename.

    Parameters
    ----------
    filename : str
        The name of the file inside the ZIP archive.

    Returns
    -------
    FileFormat
        The inferred inner file format.

    Raises
    ------
    ValueError
        If the entry carries a non-ZIP compression suffix, or if the
        file format cannot be inferred from the filename.
    """
    fmt, compression = infer_file_format_and_compression(filename)
    if compression is not None and compression is not CompressionFormat.ZIP:
        # BUG FIX: the message was an f-string with no placeholder, so it
        # never said which entry or which compression was rejected.
        raise ValueError(
            f'Unexpected compression in archive entry {filename!r}: '
            f'{compression}',
        )
    if fmt is None:
        raise ValueError(
            f'Cannot infer file format from compressed file {filename!r}',
        )
    return fmt
60
-
61
-
62
- def _extract_payload(
63
- entry: zipfile.ZipInfo,
64
- archive: zipfile.ZipFile,
65
- ) -> bytes:
66
- """
67
- Extract an archive entry into memory.
68
-
69
- Parameters
70
- ----------
71
- entry : zipfile.ZipInfo
72
- The ZIP archive entry.
73
- archive : zipfile.ZipFile
74
- The opened ZIP archive.
75
-
76
- Returns
77
- -------
78
- bytes
79
- The raw payload.
80
- """
81
- with archive.open(entry, 'r') as handle:
82
- return handle.read()
83
-
84
-
85
- # SECTION: FUNCTIONS ======================================================== #
86
-
87
-
88
def read(
    path: Path,
) -> JSONData:
    """
    Read ZIP content from ``path`` and parse the inner payload(s).

    A single-entry archive yields that entry's parsed payload directly;
    a multi-entry archive yields a dict keyed by entry filename.

    Parameters
    ----------
    path : Path
        Path to the ZIP file on disk.

    Returns
    -------
    JSONData
        Parsed payload.

    Raises
    ------
    ValueError
        If the ZIP archive is empty.
    """

    def _parse_entry(
        entry: zipfile.ZipInfo,
        archive: zipfile.ZipFile,
    ) -> JSONData:
        # Stage the entry bytes in a temp file so the path-based File
        # reader can parse them with the format inferred from the name.
        fmt = _resolve_format(entry.filename)
        payload = _extract_payload(entry, archive)
        with tempfile.TemporaryDirectory() as tmpdir:
            tmp_path = Path(tmpdir) / Path(entry.filename).name
            tmp_path.write_bytes(payload)
            # Local import avoids a circular dependency with .core.
            from .core import File

            return File(tmp_path, fmt).read()

    with zipfile.ZipFile(path, 'r') as archive:
        entries = [entry for entry in archive.infolist() if not entry.is_dir()]
        if not entries:
            raise ValueError(f'ZIP archive is empty: {path}')

        if len(entries) == 1:
            return _parse_entry(entries[0], archive)
        return {
            entry.filename: _parse_entry(entry, archive) for entry in entries
        }
136
-
137
-
138
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to ZIP at ``path`` and return record count.

    The inner member name is the archive name without its final suffix;
    its format is inferred from the archive filename.

    Parameters
    ----------
    path : Path
        Path to the ZIP file on disk.
    data : JSONData
        Data to write.

    Returns
    -------
    int
        Number of records written.
    """
    fmt = _resolve_format(path.name)
    member_name = Path(path.name).with_suffix('').name

    with tempfile.TemporaryDirectory() as tmpdir:
        staging = Path(tmpdir) / member_name
        # Local import avoids a circular dependency with .core.
        from .core import File

        written = File(staging, fmt).write(data)
        raw = staging.read_bytes()

    path.parent.mkdir(parents=True, exist_ok=True)
    with zipfile.ZipFile(
        path,
        'w',
        compression=zipfile.ZIP_DEFLATED,
    ) as archive:
        archive.writestr(member_name, raw)

    return written
etlplus/file/zsav.py DELETED
@@ -1,77 +0,0 @@
1
- """
2
- :mod:`etlplus.file.zsav` module.
3
-
4
- Helpers for reading/writing compressed SPSS (ZSAV) data files.
5
-
6
- Notes
7
- -----
8
- - A ZSAV file is a compressed binary file format used by SPSS to store
9
- datasets, including variables, labels, and data types.
10
- - Common cases:
11
- - Reading compressed data for analysis in Python.
12
- - Writing processed data back to compressed SPSS format.
13
- - Rule of thumb:
14
- - If you need to work with compressed SPSS data files, use this module for
15
- reading and writing.
16
- """
17
-
18
- from __future__ import annotations
19
-
20
- from pathlib import Path
21
-
22
- from ..types import JSONData
23
- from ..types import JSONList
24
- from . import stub
25
-
26
- # SECTION: EXPORTS ========================================================== #
27
-
28
-
29
- __all__ = [
30
- 'read',
31
- 'write',
32
- ]
33
-
34
-
35
- # SECTION: FUNCTIONS ======================================================== #
36
-
37
-
38
def read(
    path: Path,
) -> JSONList:
    """
    Read ZSAV content from ``path``.

    Delegates to the shared ``stub`` handler for the ZSAV format.

    Parameters
    ----------
    path : Path
        Path to the ZSAV file on disk.

    Returns
    -------
    JSONList
        The list of dictionaries read from the ZSAV file.
    """
    return stub.read(path, format_name='ZSAV')
55
-
56
-
57
def write(
    path: Path,
    data: JSONData,
) -> int:
    """
    Write ``data`` to ZSAV file at ``path`` and return record count.

    Delegates to the shared ``stub`` handler for the ZSAV format.

    Parameters
    ----------
    path : Path
        Path to the ZSAV file on disk.
    data : JSONData
        Data to write as ZSAV file. Should be a list of dictionaries or
        a single dictionary.

    Returns
    -------
    int
        The number of rows written to the ZSAV file.
    """
    return stub.write(path, data, format_name='ZSAV')
etlplus/ops/README.md DELETED
@@ -1,50 +0,0 @@
1
- # etlplus.ops subpackage
2
-
3
- Documentation for the `etlplus.validation` subpackage: data validation utilities and helpers.
4
-
5
- - Provides flexible data validation for ETL pipelines
6
- - Supports type checking, required fields, and custom rules
7
- - Includes utilities for rule definition and validation logic
8
-
9
- Back to project overview: see the top-level [README](../../README.md).
10
-
11
- - [etlplus.ops subpackage](#etlplusops-subpackage)
12
- - [Validation Features](#validation-features)
13
- - [Defining Validation Rules](#defining-validation-rules)
14
- - [Example: Validating Data](#example-validating-data)
15
- - [See Also](#see-also)
16
-
17
- ## Validation Features
18
-
19
- - Type checking (string, number, boolean, etc.)
20
- - Required/optional fields
21
- - Enum and pattern validation
22
- - Custom rule support
23
-
24
- ## Defining Validation Rules
25
-
26
- Validation rules are defined as dictionaries specifying field types, requirements, and constraints:
27
-
28
- ```python
29
- rules = {
30
- "name": {"type": "string", "required": True},
31
- "age": {"type": "number", "min": 0, "max": 120},
32
- }
33
- ```
34
-
35
- ## Example: Validating Data
36
-
37
- ```python
38
- from etlplus.validation import validate
39
-
40
- result = validate({"name": "Alice", "age": 30}, rules)
41
- if result["valid"]:
42
- print("Data is valid!")
43
- else:
44
- print(result["errors"])
45
- ```
46
-
47
- ## See Also
48
-
49
- - Top-level CLI and library usage in the main [README](../../README.md)
50
- - Validation utilities in [utils.py](utils.py)