etlplus 0.9.2__py3-none-any.whl → 0.10.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. etlplus/__init__.py +26 -1
  2. etlplus/api/README.md +3 -51
  3. etlplus/api/__init__.py +0 -10
  4. etlplus/api/config.py +28 -39
  5. etlplus/api/endpoint_client.py +3 -3
  6. etlplus/api/pagination/client.py +1 -1
  7. etlplus/api/rate_limiting/config.py +1 -13
  8. etlplus/api/rate_limiting/rate_limiter.py +11 -8
  9. etlplus/api/request_manager.py +6 -11
  10. etlplus/api/transport.py +2 -14
  11. etlplus/api/types.py +6 -96
  12. etlplus/cli/commands.py +43 -76
  13. etlplus/cli/constants.py +1 -1
  14. etlplus/cli/handlers.py +12 -40
  15. etlplus/cli/io.py +2 -2
  16. etlplus/cli/main.py +1 -1
  17. etlplus/cli/state.py +7 -4
  18. etlplus/{workflow → config}/__init__.py +23 -10
  19. etlplus/{workflow → config}/connector.py +44 -58
  20. etlplus/{workflow → config}/jobs.py +32 -105
  21. etlplus/{workflow → config}/pipeline.py +51 -59
  22. etlplus/{workflow → config}/profile.py +5 -8
  23. etlplus/config/types.py +204 -0
  24. etlplus/config/utils.py +120 -0
  25. etlplus/database/ddl.py +1 -1
  26. etlplus/database/engine.py +3 -19
  27. etlplus/database/orm.py +0 -2
  28. etlplus/database/schema.py +1 -1
  29. etlplus/enums.py +288 -0
  30. etlplus/{ops/extract.py → extract.py} +99 -81
  31. etlplus/file.py +652 -0
  32. etlplus/{ops/load.py → load.py} +101 -78
  33. etlplus/{ops/run.py → run.py} +127 -159
  34. etlplus/{api/utils.py → run_helpers.py} +153 -209
  35. etlplus/{ops/transform.py → transform.py} +68 -75
  36. etlplus/types.py +4 -5
  37. etlplus/utils.py +2 -136
  38. etlplus/{ops/validate.py → validate.py} +12 -22
  39. etlplus/validation/__init__.py +44 -0
  40. etlplus/{ops → validation}/utils.py +17 -53
  41. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/METADATA +17 -210
  42. etlplus-0.10.2.dist-info/RECORD +65 -0
  43. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/WHEEL +1 -1
  44. etlplus/README.md +0 -37
  45. etlplus/api/enums.py +0 -51
  46. etlplus/cli/README.md +0 -40
  47. etlplus/database/README.md +0 -48
  48. etlplus/file/README.md +0 -105
  49. etlplus/file/__init__.py +0 -25
  50. etlplus/file/_imports.py +0 -141
  51. etlplus/file/_io.py +0 -160
  52. etlplus/file/accdb.py +0 -78
  53. etlplus/file/arrow.py +0 -78
  54. etlplus/file/avro.py +0 -176
  55. etlplus/file/bson.py +0 -77
  56. etlplus/file/cbor.py +0 -78
  57. etlplus/file/cfg.py +0 -79
  58. etlplus/file/conf.py +0 -80
  59. etlplus/file/core.py +0 -322
  60. etlplus/file/csv.py +0 -79
  61. etlplus/file/dat.py +0 -78
  62. etlplus/file/dta.py +0 -77
  63. etlplus/file/duckdb.py +0 -78
  64. etlplus/file/enums.py +0 -343
  65. etlplus/file/feather.py +0 -111
  66. etlplus/file/fwf.py +0 -77
  67. etlplus/file/gz.py +0 -123
  68. etlplus/file/hbs.py +0 -78
  69. etlplus/file/hdf5.py +0 -78
  70. etlplus/file/ini.py +0 -79
  71. etlplus/file/ion.py +0 -78
  72. etlplus/file/jinja2.py +0 -78
  73. etlplus/file/json.py +0 -98
  74. etlplus/file/log.py +0 -78
  75. etlplus/file/mat.py +0 -78
  76. etlplus/file/mdb.py +0 -78
  77. etlplus/file/msgpack.py +0 -78
  78. etlplus/file/mustache.py +0 -78
  79. etlplus/file/nc.py +0 -78
  80. etlplus/file/ndjson.py +0 -108
  81. etlplus/file/numbers.py +0 -75
  82. etlplus/file/ods.py +0 -79
  83. etlplus/file/orc.py +0 -111
  84. etlplus/file/parquet.py +0 -113
  85. etlplus/file/pb.py +0 -78
  86. etlplus/file/pbf.py +0 -77
  87. etlplus/file/properties.py +0 -78
  88. etlplus/file/proto.py +0 -77
  89. etlplus/file/psv.py +0 -79
  90. etlplus/file/rda.py +0 -78
  91. etlplus/file/rds.py +0 -78
  92. etlplus/file/sas7bdat.py +0 -78
  93. etlplus/file/sav.py +0 -77
  94. etlplus/file/sqlite.py +0 -78
  95. etlplus/file/stub.py +0 -84
  96. etlplus/file/sylk.py +0 -77
  97. etlplus/file/tab.py +0 -81
  98. etlplus/file/toml.py +0 -78
  99. etlplus/file/tsv.py +0 -80
  100. etlplus/file/txt.py +0 -102
  101. etlplus/file/vm.py +0 -78
  102. etlplus/file/wks.py +0 -77
  103. etlplus/file/xls.py +0 -88
  104. etlplus/file/xlsm.py +0 -79
  105. etlplus/file/xlsx.py +0 -99
  106. etlplus/file/xml.py +0 -185
  107. etlplus/file/xpt.py +0 -78
  108. etlplus/file/yaml.py +0 -95
  109. etlplus/file/zip.py +0 -175
  110. etlplus/file/zsav.py +0 -77
  111. etlplus/ops/README.md +0 -50
  112. etlplus/ops/__init__.py +0 -61
  113. etlplus/templates/README.md +0 -46
  114. etlplus/workflow/README.md +0 -52
  115. etlplus/workflow/dag.py +0 -105
  116. etlplus/workflow/types.py +0 -115
  117. etlplus-0.9.2.dist-info/RECORD +0 -134
  118. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/entry_points.txt +0 -0
  119. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/licenses/LICENSE +0 -0
  120. {etlplus-0.9.2.dist-info → etlplus-0.10.2.dist-info}/top_level.txt +0 -0
etlplus/cli/commands.py CHANGED
@@ -36,7 +36,7 @@ from typing import cast
36
36
  import typer
37
37
 
38
38
  from .. import __version__
39
- from ..file import FileFormat
39
+ from ..enums import FileFormat
40
40
  from . import handlers
41
41
  from .constants import CLI_DESCRIPTION
42
42
  from .constants import CLI_EPILOG
@@ -61,24 +61,6 @@ __all__ = ['app']
61
61
 
62
62
  # SECTION: TYPE ALIASES ==================================================== #
63
63
 
64
-
65
- JobOption = Annotated[
66
- str | None,
67
- typer.Option(
68
- '-j',
69
- '--job',
70
- help='Name of the job to run',
71
- ),
72
- ]
73
-
74
- JobsOption = Annotated[
75
- bool,
76
- typer.Option(
77
- '--jobs',
78
- help='List available job names and exit',
79
- ),
80
- ]
81
-
82
64
  OperationsOption = Annotated[
83
65
  str,
84
66
  typer.Option(
@@ -107,23 +89,6 @@ PipelineConfigOption = Annotated[
107
89
  ),
108
90
  ]
109
91
 
110
- PipelineOption = Annotated[
111
- str | None,
112
- typer.Option(
113
- '-p',
114
- '--pipeline',
115
- help='Name of the pipeline to run',
116
- ),
117
- ]
118
-
119
- PipelinesOption = Annotated[
120
- bool,
121
- typer.Option(
122
- '--pipelines',
123
- help='List ETL pipelines',
124
- ),
125
- ]
126
-
127
92
  RenderConfigOption = Annotated[
128
93
  str | None,
129
94
  typer.Option(
@@ -228,22 +193,6 @@ SourceTypeOption = Annotated[
228
193
  ),
229
194
  ]
230
195
 
231
- SourcesOption = Annotated[
232
- bool,
233
- typer.Option(
234
- '--sources',
235
- help='List data sources',
236
- ),
237
- ]
238
-
239
- SummaryOption = Annotated[
240
- bool,
241
- typer.Option(
242
- '--summary',
243
- help='Show pipeline summary (name, version, sources, targets, jobs)',
244
- ),
245
- ]
246
-
247
196
  TargetArg = Annotated[
248
197
  str,
249
198
  typer.Argument(
@@ -278,22 +227,6 @@ TargetTypeOption = Annotated[
278
227
  ),
279
228
  ]
280
229
 
281
- TargetsOption = Annotated[
282
- bool,
283
- typer.Option(
284
- '--targets',
285
- help='List data targets',
286
- ),
287
- ]
288
-
289
- TransformsOption = Annotated[
290
- bool,
291
- typer.Option(
292
- '--transforms',
293
- help='List data transforms',
294
- ),
295
- ]
296
-
297
230
 
298
231
  # SECTION: INTERNAL FUNCTIONS =============================================== #
299
232
 
@@ -408,12 +341,36 @@ def _root(
408
341
  def check_cmd(
409
342
  ctx: typer.Context,
410
343
  config: PipelineConfigOption,
411
- jobs: JobsOption = False,
412
- pipelines: PipelinesOption = False,
413
- sources: SourcesOption = False,
414
- summary: SummaryOption = False,
415
- targets: TargetsOption = False,
416
- transforms: TransformsOption = False,
344
+ jobs: bool = typer.Option(
345
+ False,
346
+ '--jobs',
347
+ help='List available job names and exit',
348
+ ),
349
+ pipelines: bool = typer.Option(
350
+ False,
351
+ '--pipelines',
352
+ help='List ETL pipelines',
353
+ ),
354
+ sources: bool = typer.Option(
355
+ False,
356
+ '--sources',
357
+ help='List data sources',
358
+ ),
359
+ summary: bool = typer.Option(
360
+ False,
361
+ '--summary',
362
+ help='Show pipeline summary (name, version, sources, targets, jobs)',
363
+ ),
364
+ targets: bool = typer.Option(
365
+ False,
366
+ '--targets',
367
+ help='List data targets',
368
+ ),
369
+ transforms: bool = typer.Option(
370
+ False,
371
+ '--transforms',
372
+ help='List data transforms',
373
+ ),
417
374
  ) -> int:
418
375
  """
419
376
  Inspect a pipeline configuration.
@@ -726,8 +683,18 @@ def render_cmd(
726
683
  def run_cmd(
727
684
  ctx: typer.Context,
728
685
  config: PipelineConfigOption,
729
- job: JobOption = None,
730
- pipeline: PipelineOption = None,
686
+ job: str | None = typer.Option(
687
+ None,
688
+ '-j',
689
+ '--job',
690
+ help='Name of the job to run',
691
+ ),
692
+ pipeline: str | None = typer.Option(
693
+ None,
694
+ '-p',
695
+ '--pipeline',
696
+ help='Name of the pipeline to run',
697
+ ),
731
698
  ) -> int:
732
699
  """
733
700
  Execute an ETL job or pipeline from a YAML configuration.
etlplus/cli/constants.py CHANGED
@@ -9,7 +9,7 @@ from __future__ import annotations
9
9
  from typing import Final
10
10
 
11
11
  from ..enums import DataConnectorType
12
- from ..file import FileFormat
12
+ from ..enums import FileFormat
13
13
 
14
14
  # SECTION: EXPORTS ========================================================== #
15
15
 
etlplus/cli/handlers.py CHANGED
@@ -14,20 +14,19 @@ from typing import Any
14
14
  from typing import Literal
15
15
  from typing import cast
16
16
 
17
+ from ..config import PipelineConfig
18
+ from ..config import load_pipeline_config
17
19
  from ..database import load_table_spec
18
20
  from ..database import render_tables
21
+ from ..extract import extract
19
22
  from ..file import File
20
- from ..file import FileFormat
21
- from ..ops import extract
22
- from ..ops import load
23
- from ..ops import run
24
- from ..ops import transform
25
- from ..ops import validate
26
- from ..ops.validate import FieldRules
23
+ from ..load import load
24
+ from ..run import run
25
+ from ..transform import transform
27
26
  from ..types import JSONData
28
27
  from ..types import TemplateKey
29
- from ..workflow import PipelineConfig
30
- from ..workflow import load_pipeline_config
28
+ from ..validate import FieldRules
29
+ from ..validate import validate
31
30
  from . import io as cli_io
32
31
 
33
32
  # SECTION: EXPORTS ========================================================== #
@@ -121,12 +120,9 @@ def _check_sections(
121
120
  if targets:
122
121
  sections['targets'] = [tgt.name for tgt in cfg.targets]
123
122
  if transforms:
124
- if isinstance(cfg.transforms, Mapping):
125
- sections['transforms'] = list(cfg.transforms)
126
- else:
127
- sections['transforms'] = [
128
- getattr(trf, 'name', None) for trf in cfg.transforms
129
- ]
123
+ sections['transforms'] = [
124
+ getattr(trf, 'name', None) for trf in cfg.transforms
125
+ ]
130
126
  if not sections:
131
127
  sections['jobs'] = _pipeline_summary(cfg)['jobs']
132
128
  return sections
@@ -160,29 +156,6 @@ def _pipeline_summary(
160
156
  }
161
157
 
162
158
 
163
- def _write_file_payload(
164
- payload: JSONData,
165
- target: str,
166
- *,
167
- format_hint: str | None,
168
- ) -> None:
169
- """
170
- Write a JSON-like payload to a file path using an optional format hint.
171
-
172
- Parameters
173
- ----------
174
- payload : JSONData
175
- The structured data to write.
176
- target : str
177
- File path to write to.
178
- format_hint : str | None
179
- Optional format hint for :class:`FileFormat`.
180
- """
181
- file_path = Path(target)
182
- file_format = FileFormat.coerce(format_hint) if format_hint else None
183
- File(file_path, file_format=file_format).write(payload)
184
-
185
-
186
159
  # SECTION: FUNCTIONS ======================================================== #
187
160
 
188
161
 
@@ -596,9 +569,8 @@ def transform_handler(
596
569
 
597
570
  data = transform(payload, cast(TransformOperations, operations_payload))
598
571
 
599
- # TODO: Generalize to handle non-file targets.
600
572
  if target and target != '-':
601
- _write_file_payload(data, target, format_hint=target_format)
573
+ File.write_file(target, data, file_format=target_format)
602
574
  print(f'Data transformed and saved to {target}')
603
575
  return 0
604
576
 
etlplus/cli/io.py CHANGED
@@ -15,8 +15,8 @@ from pathlib import Path
15
15
  from typing import Any
16
16
  from typing import cast
17
17
 
18
+ from ..enums import FileFormat
18
19
  from ..file import File
19
- from ..file import FileFormat
20
20
  from ..types import JSONData
21
21
  from ..utils import print_json
22
22
 
@@ -331,6 +331,6 @@ def write_json_output(
331
331
  """
332
332
  if not output_path or output_path == '-':
333
333
  return False
334
- File(Path(output_path), FileFormat.JSON).write(data)
334
+ File(Path(output_path), FileFormat.JSON).write_json(data)
335
335
  print(f'{success_message} {output_path}')
336
336
  return True
etlplus/cli/main.py CHANGED
@@ -50,7 +50,7 @@ def _emit_context_help(
50
50
  return False
51
51
 
52
52
  with contextlib.redirect_stdout(sys.stderr):
53
- print(ctx.get_help())
53
+ ctx.get_help()
54
54
  return True
55
55
 
56
56
 
etlplus/cli/state.py CHANGED
@@ -15,7 +15,6 @@ from typing import Final
15
15
 
16
16
  import typer
17
17
 
18
- from ..utils import normalize_str
19
18
  from .constants import DATA_CONNECTORS
20
19
 
21
20
  # SECTION: EXPORTS ========================================================== #
@@ -323,10 +322,14 @@ def validate_choice(
323
322
  typer.BadParameter
324
323
  If the input value is not in the set of valid choices.
325
324
  """
326
- v = normalize_str(str(value or ''))
327
- normalized_choices = {normalize_str(c): c for c in choices}
325
+ v = str(value or '').strip().lower()
326
+ normalized_choices = {c.lower() for c in choices}
328
327
  if v in normalized_choices:
329
- return normalized_choices[v]
328
+ # Preserve original casing from choices when possible for messages
329
+ for choice in choices:
330
+ if choice.lower() == v:
331
+ return choice
332
+ return v
330
333
  allowed = ', '.join(sorted(choices))
331
334
  raise typer.BadParameter(
332
335
  f"Invalid {label} '{value}'. Choose from: {allowed}",
@@ -1,7 +1,17 @@
1
1
  """
2
- :mod:`etlplus.workflow` package.
2
+ :mod:`etlplus.config` package.
3
3
 
4
- Job workflow helpers.
4
+ Configuration models and helpers for ETLPlus.
5
+
6
+ This package defines models for data sources/targets ("connectors"), APIs,
7
+ pagination/rate limits, pipeline orchestration, and related utilities. The
8
+ parsers are permissive (accepting ``Mapping[str, Any]``) and normalize to
9
+ concrete types without raising on unknown/optional fields.
10
+
11
+ Notes
12
+ -----
13
+ - The models use ``@dataclass(slots=True)`` and avoid mutating inputs.
14
+ - TypedDicts are editor/type-checking hints and are not enforced at runtime.
5
15
  """
6
16
 
7
17
  from __future__ import annotations
@@ -11,7 +21,6 @@ from .connector import ConnectorApi
11
21
  from .connector import ConnectorDb
12
22
  from .connector import ConnectorFile
13
23
  from .connector import parse_connector
14
- from .dag import topological_sort_jobs
15
24
  from .jobs import ExtractRef
16
25
  from .jobs import JobConfig
17
26
  from .jobs import LoadRef
@@ -19,25 +28,29 @@ from .jobs import TransformRef
19
28
  from .jobs import ValidationRef
20
29
  from .pipeline import PipelineConfig
21
30
  from .pipeline import load_pipeline_config
31
+ from .profile import ProfileConfig
32
+ from .types import ConnectorType
22
33
 
23
34
  # SECTION: EXPORTS ========================================================== #
24
35
 
25
36
 
26
37
  __all__ = [
27
- # Data Classes
38
+ # Connectors
39
+ 'Connector',
40
+ 'ConnectorType',
28
41
  'ConnectorApi',
29
42
  'ConnectorDb',
30
43
  'ConnectorFile',
44
+ 'parse_connector',
45
+ # Jobs / Refs
31
46
  'ExtractRef',
32
47
  'JobConfig',
33
48
  'LoadRef',
34
- 'PipelineConfig',
35
49
  'TransformRef',
36
50
  'ValidationRef',
37
- # Functions
51
+ # Pipeline
52
+ 'PipelineConfig',
38
53
  'load_pipeline_config',
39
- 'parse_connector',
40
- 'topological_sort_jobs',
41
- # Type Aliases
42
- 'Connector',
54
+ # Profile
55
+ 'ProfileConfig',
43
56
  ]
@@ -1,5 +1,5 @@
1
1
  """
2
- :mod:`etlplus.workflow.connector` module.
2
+ :mod:`etlplus.config.connector` module.
3
3
 
4
4
  A module defining configuration types for data source/target connectors in ETL
5
5
  pipelines. A "connector" is any I/O endpoint:
@@ -11,26 +11,25 @@ pipelines. A "connector" is any I/O endpoint:
11
11
 
12
12
  Examples
13
13
  --------
14
- - Use :class:`ConnectorApi`/:class:`ConnectorFile`/:class:`ConnectorDb` when
15
- you want the concrete dataclasses.
16
- - Use the :class:`Connector` union for typing a value that can be any
17
- connector.
18
- - Use :func:`parse_connector(obj)` to construct a connector instance from a
19
- generic mapping that includes a *type* key.
14
+ - Use ``ConnectorApi``/``ConnectorFile``/``ConnectorDb`` when you want the
15
+ concrete dataclasses.
16
+ - Use the ``Connector`` union for typing a value that can be any connector.
17
+ - Use ``parse_connector(obj)`` to construct a connector instance from a generic
18
+ mapping that includes a ``type`` key.
20
19
 
21
20
  Notes
22
21
  -----
23
22
  - TypedDict shapes are editor hints; runtime parsing remains permissive
24
- (from_obj accepts Mapping[str, Any]).
25
- - TypedDicts referenced in :mod:`etlplus.workflow.types` remain editor hints.
26
- Runtime parsing stays permissive and tolerant.
23
+ (from_obj accepts Mapping[str, Any]).
24
+ - TypedDicts referenced in :mod:`etlplus.config.types` remain editor hints.
25
+ Runtime parsing stays permissive and tolerant.
27
26
 
28
27
  See Also
29
28
  --------
30
29
  - TypedDict shapes for editor hints (not enforced at runtime):
31
- :mod:`etlplus.workflow.types.ConnectorApiConfigMap`,
32
- :mod:`etlplus.workflow.types.ConnectorDbConfigMap`,
33
- :mod:`etlplus.workflow.types.ConnectorFileConfigMap`.
30
+ :mod:`etlplus.config.types.ConnectorApiConfigMap`,
31
+ :mod:`etlplus.config.types.ConnectorDbConfigMap`,
32
+ :mod:`etlplus.config.types.ConnectorFileConfigMap`.
34
33
  """
35
34
 
36
35
  from __future__ import annotations
@@ -60,7 +59,7 @@ if TYPE_CHECKING: # Editor-only typing hints to avoid runtime imports
60
59
 
61
60
 
62
61
  __all__ = [
63
- # Data Classes
62
+ # Classes
64
63
  'ConnectorApi',
65
64
  'ConnectorDb',
66
65
  'ConnectorFile',
@@ -71,40 +70,6 @@ __all__ = [
71
70
  ]
72
71
 
73
72
 
74
- # SECTION: INTERNAL FUNCTIONS ============================================== #
75
-
76
-
77
- def _require_name(
78
- obj: StrAnyMap,
79
- *,
80
- kind: str,
81
- ) -> str:
82
- """
83
- Extract and validate the ``name`` field from connector mappings.
84
-
85
- Parameters
86
- ----------
87
- obj : StrAnyMap
88
- Connector mapping with a ``name`` entry.
89
- kind : str
90
- Connector kind used in the error message.
91
-
92
- Returns
93
- -------
94
- str
95
- Valid connector name.
96
-
97
- Raises
98
- ------
99
- TypeError
100
- If ``name`` is missing or not a string.
101
- """
102
- name = obj.get('name')
103
- if not isinstance(name, str):
104
- raise TypeError(f'Connector{kind} requires a "name" (str)')
105
- return name
106
-
107
-
108
73
  # SECTION: DATA CLASSES ===================================================== #
109
74
 
110
75
 
@@ -118,12 +83,12 @@ class ConnectorApi:
118
83
  name : str
119
84
  Unique connector name.
120
85
  type : ConnectorType
121
- Connector kind literal, always ``'api'``.
86
+ Connector kind literal, always ``"api"``.
122
87
  url : str | None
123
88
  Direct absolute URL (when not using ``service``/``endpoint`` refs).
124
89
  method : str | None
125
90
  Optional HTTP method; typically omitted for sources (defaults to
126
- GET) and used for targets (e.g., ``'post'``).
91
+ GET) and used for targets (e.g., ``"post"``).
127
92
  headers : dict[str, str]
128
93
  Additional request headers.
129
94
  query_params : dict[str, Any]
@@ -146,7 +111,7 @@ class ConnectorApi:
146
111
 
147
112
  # Direct form
148
113
  url: str | None = None
149
- # Optional HTTP method; typically omitted for sources (defaults to GET)
114
+ # Optional HTTP method; typically omitted for sources (defaults to GET
150
115
  # at runtime) and used for targets (e.g., 'post', 'put').
151
116
  method: str | None = None
152
117
  headers: dict[str, str] = field(default_factory=dict)
@@ -185,8 +150,15 @@ class ConnectorApi:
185
150
  -------
186
151
  Self
187
152
  Parsed connector instance.
153
+
154
+ Raises
155
+ ------
156
+ TypeError
157
+ If ``name`` is missing or invalid.
188
158
  """
189
- name = _require_name(obj, kind='Api')
159
+ name = obj.get('name')
160
+ if not isinstance(name, str):
161
+ raise TypeError('ConnectorApi requires a "name" (str)')
190
162
  headers = cast_str_dict(obj.get('headers'))
191
163
 
192
164
  return cls(
@@ -213,7 +185,7 @@ class ConnectorDb:
213
185
  name : str
214
186
  Unique connector name.
215
187
  type : ConnectorType
216
- Connector kind literal, always ``'database'``.
188
+ Connector kind literal, always ``"database"``.
217
189
  connection_string : str | None
218
190
  Connection string/DSN for the database.
219
191
  query : str | None
@@ -221,7 +193,7 @@ class ConnectorDb:
221
193
  table : str | None
222
194
  Target/source table name (optional).
223
195
  mode : str | None
224
- Load mode hint (e.g., ``'append'``, ``'replace'``) — future use.
196
+ Load mode hint (e.g., ``"append"``, ``"replace"``) — future use.
225
197
  """
226
198
 
227
199
  # -- Attributes -- #
@@ -260,8 +232,15 @@ class ConnectorDb:
260
232
  -------
261
233
  Self
262
234
  Parsed connector instance.
235
+
236
+ Raises
237
+ ------
238
+ TypeError
239
+ If ``name`` is missing or invalid.
263
240
  """
264
- name = _require_name(obj, kind='Db')
241
+ name = obj.get('name')
242
+ if not isinstance(name, str):
243
+ raise TypeError('ConnectorDb requires a "name" (str)')
265
244
 
266
245
  return cls(
267
246
  name=name,
@@ -283,9 +262,9 @@ class ConnectorFile:
283
262
  name : str
284
263
  Unique connector name.
285
264
  type : ConnectorType
286
- Connector kind literal, always ``'file'``.
265
+ Connector kind literal, always ``"file"``.
287
266
  format : str | None
288
- File format (e.g., ``'json'``, ``'csv'``).
267
+ File format (e.g., ``"json"``, ``"csv"``).
289
268
  path : str | None
290
269
  File path or URI.
291
270
  options : dict[str, Any]
@@ -327,8 +306,15 @@ class ConnectorFile:
327
306
  -------
328
307
  Self
329
308
  Parsed connector instance.
309
+
310
+ Raises
311
+ ------
312
+ TypeError
313
+ If ``name`` is missing or invalid.
330
314
  """
331
- name = _require_name(obj, kind='File')
315
+ name = obj.get('name')
316
+ if not isinstance(name, str):
317
+ raise TypeError('ConnectorFile requires a "name" (str)')
332
318
 
333
319
  return cls(
334
320
  name=name,