etlplus 0.14.3__py3-none-any.whl → 0.15.2__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. etlplus/README.md +4 -4
  2. etlplus/api/README.md +33 -2
  3. etlplus/api/config.py +3 -8
  4. etlplus/api/types.py +89 -0
  5. etlplus/api/utils.py +5 -1
  6. etlplus/cli/README.md +2 -2
  7. etlplus/cli/commands.py +75 -42
  8. etlplus/cli/handlers.py +32 -14
  9. etlplus/cli/main.py +1 -1
  10. etlplus/cli/state.py +4 -7
  11. etlplus/database/README.md +2 -2
  12. etlplus/database/engine.py +18 -2
  13. etlplus/database/orm.py +2 -0
  14. etlplus/file/README.md +2 -2
  15. etlplus/file/_io.py +39 -0
  16. etlplus/file/json.py +2 -14
  17. etlplus/file/yaml.py +2 -14
  18. etlplus/ops/run.py +14 -9
  19. etlplus/ops/utils.py +4 -33
  20. etlplus/ops/validate.py +3 -3
  21. etlplus/templates/README.md +2 -2
  22. etlplus/types.py +3 -2
  23. etlplus/utils.py +136 -2
  24. etlplus/{config → workflow}/README.md +6 -6
  25. etlplus/{config → workflow}/__init__.py +10 -23
  26. etlplus/{config → workflow}/connector.py +58 -44
  27. etlplus/{dag.py → workflow/dag.py} +6 -4
  28. etlplus/{config → workflow}/jobs.py +101 -38
  29. etlplus/{config → workflow}/pipeline.py +57 -49
  30. etlplus/{config → workflow}/profile.py +8 -5
  31. etlplus/workflow/types.py +115 -0
  32. {etlplus-0.14.3.dist-info → etlplus-0.15.2.dist-info}/METADATA +4 -4
  33. {etlplus-0.14.3.dist-info → etlplus-0.15.2.dist-info}/RECORD +37 -38
  34. {etlplus-0.14.3.dist-info → etlplus-0.15.2.dist-info}/WHEEL +1 -1
  35. etlplus/config/types.py +0 -204
  36. etlplus/config/utils.py +0 -120
  37. {etlplus-0.14.3.dist-info → etlplus-0.15.2.dist-info}/entry_points.txt +0 -0
  38. {etlplus-0.14.3.dist-info → etlplus-0.15.2.dist-info}/licenses/LICENSE +0 -0
  39. {etlplus-0.14.3.dist-info → etlplus-0.15.2.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,5 @@
1
1
  """
2
- :mod:`etlplus.config.connector` module.
2
+ :mod:`etlplus.workflow.connector` module.
3
3
 
4
4
  A module defining configuration types for data source/target connectors in ETL
5
5
  pipelines. A "connector" is any I/O endpoint:
@@ -11,25 +11,26 @@ pipelines. A "connector" is any I/O endpoint:
11
11
 
12
12
  Examples
13
13
  --------
14
- - Use ``ConnectorApi``/``ConnectorFile``/``ConnectorDb`` when you want the
15
- concrete dataclasses.
16
- - Use the ``Connector`` union for typing a value that can be any connector.
17
- - Use ``parse_connector(obj)`` to construct a connector instance from a generic
18
- mapping that includes a ``type`` key.
14
+ - Use :class:`ConnectorApi`/:class:`ConnectorFile`/:class:`ConnectorDb` when
15
+ you want the concrete dataclasses.
16
+ - Use the :class:`Connector` union for typing a value that can be any
17
+ connector.
18
+ - Use :func:`parse_connector(obj)` to construct a connector instance from a
19
+ generic mapping that includes a *type* key.
19
20
 
20
21
  Notes
21
22
  -----
22
23
  - TypedDict shapes are editor hints; runtime parsing remains permissive
23
- (from_obj accepts Mapping[str, Any]).
24
- - TypedDicts referenced in :mod:`etlplus.config.types` remain editor hints.
25
- Runtime parsing stays permissive and tolerant.
24
+ (from_obj accepts Mapping[str, Any]).
25
+ - TypedDicts referenced in :mod:`etlplus.workflow.types` remain editor hints.
26
+ Runtime parsing stays permissive and tolerant.
26
27
 
27
28
  See Also
28
29
  --------
29
30
  - TypedDict shapes for editor hints (not enforced at runtime):
30
- :mod:`etlplus.config.types.ConnectorApiConfigMap`,
31
- :mod:`etlplus.config.types.ConnectorDbConfigMap`,
32
- :mod:`etlplus.config.types.ConnectorFileConfigMap`.
31
+ :mod:`etlplus.workflow.types.ConnectorApiConfigMap`,
32
+ :mod:`etlplus.workflow.types.ConnectorDbConfigMap`,
33
+ :mod:`etlplus.workflow.types.ConnectorFileConfigMap`.
33
34
  """
34
35
 
35
36
  from __future__ import annotations
@@ -59,7 +60,7 @@ if TYPE_CHECKING: # Editor-only typing hints to avoid runtime imports
59
60
 
60
61
 
61
62
  __all__ = [
62
- # Classes
63
+ # Data Classes
63
64
  'ConnectorApi',
64
65
  'ConnectorDb',
65
66
  'ConnectorFile',
@@ -70,6 +71,40 @@ __all__ = [
70
71
  ]
71
72
 
72
73
 
74
+ # SECTION: INTERNAL FUNCTIONS ============================================== #
75
+
76
+
77
+ def _require_name(
78
+ obj: StrAnyMap,
79
+ *,
80
+ kind: str,
81
+ ) -> str:
82
+ """
83
+ Extract and validate the ``name`` field from connector mappings.
84
+
85
+ Parameters
86
+ ----------
87
+ obj : StrAnyMap
88
+ Connector mapping with a ``name`` entry.
89
+ kind : str
90
+ Connector kind used in the error message.
91
+
92
+ Returns
93
+ -------
94
+ str
95
+ Valid connector name.
96
+
97
+ Raises
98
+ ------
99
+ TypeError
100
+ If ``name`` is missing or not a string.
101
+ """
102
+ name = obj.get('name')
103
+ if not isinstance(name, str):
104
+ raise TypeError(f'Connector{kind} requires a "name" (str)')
105
+ return name
106
+
107
+
73
108
  # SECTION: DATA CLASSES ===================================================== #
74
109
 
75
110
 
@@ -83,12 +118,12 @@ class ConnectorApi:
83
118
  name : str
84
119
  Unique connector name.
85
120
  type : ConnectorType
86
- Connector kind literal, always ``"api"``.
121
+ Connector kind literal, always ``'api'``.
87
122
  url : str | None
88
123
  Direct absolute URL (when not using ``service``/``endpoint`` refs).
89
124
  method : str | None
90
125
  Optional HTTP method; typically omitted for sources (defaults to
91
- GET) and used for targets (e.g., ``"post"``).
126
+ GET) and used for targets (e.g., ``'post'``).
92
127
  headers : dict[str, str]
93
128
  Additional request headers.
94
129
  query_params : dict[str, Any]
@@ -111,7 +146,7 @@ class ConnectorApi:
111
146
 
112
147
  # Direct form
113
148
  url: str | None = None
114
- # Optional HTTP method; typically omitted for sources (defaults to GET
149
+ # Optional HTTP method; typically omitted for sources (defaults to GET)
115
150
  # at runtime) and used for targets (e.g., 'post', 'put').
116
151
  method: str | None = None
117
152
  headers: dict[str, str] = field(default_factory=dict)
@@ -150,15 +185,8 @@ class ConnectorApi:
150
185
  -------
151
186
  Self
152
187
  Parsed connector instance.
153
-
154
- Raises
155
- ------
156
- TypeError
157
- If ``name`` is missing or invalid.
158
188
  """
159
- name = obj.get('name')
160
- if not isinstance(name, str):
161
- raise TypeError('ConnectorApi requires a "name" (str)')
189
+ name = _require_name(obj, kind='Api')
162
190
  headers = cast_str_dict(obj.get('headers'))
163
191
 
164
192
  return cls(
@@ -185,7 +213,7 @@ class ConnectorDb:
185
213
  name : str
186
214
  Unique connector name.
187
215
  type : ConnectorType
188
- Connector kind literal, always ``"database"``.
216
+ Connector kind literal, always ``'database'``.
189
217
  connection_string : str | None
190
218
  Connection string/DSN for the database.
191
219
  query : str | None
@@ -193,7 +221,7 @@ class ConnectorDb:
193
221
  table : str | None
194
222
  Target/source table name (optional).
195
223
  mode : str | None
196
- Load mode hint (e.g., ``"append"``, ``"replace"``) — future use.
224
+ Load mode hint (e.g., ``'append'``, ``'replace'``) — future use.
197
225
  """
198
226
 
199
227
  # -- Attributes -- #
@@ -232,15 +260,8 @@ class ConnectorDb:
232
260
  -------
233
261
  Self
234
262
  Parsed connector instance.
235
-
236
- Raises
237
- ------
238
- TypeError
239
- If ``name`` is missing or invalid.
240
263
  """
241
- name = obj.get('name')
242
- if not isinstance(name, str):
243
- raise TypeError('ConnectorDb requires a "name" (str)')
264
+ name = _require_name(obj, kind='Db')
244
265
 
245
266
  return cls(
246
267
  name=name,
@@ -262,9 +283,9 @@ class ConnectorFile:
262
283
  name : str
263
284
  Unique connector name.
264
285
  type : ConnectorType
265
- Connector kind literal, always ``"file"``.
286
+ Connector kind literal, always ``'file'``.
266
287
  format : str | None
267
- File format (e.g., ``"json"``, ``"csv"``).
288
+ File format (e.g., ``'json'``, ``'csv'``).
268
289
  path : str | None
269
290
  File path or URI.
270
291
  options : dict[str, Any]
@@ -306,15 +327,8 @@ class ConnectorFile:
306
327
  -------
307
328
  Self
308
329
  Parsed connector instance.
309
-
310
- Raises
311
- ------
312
- TypeError
313
- If ``name`` is missing or invalid.
314
330
  """
315
- name = obj.get('name')
316
- if not isinstance(name, str):
317
- raise TypeError('ConnectorFile requires a "name" (str)')
331
+ name = _require_name(obj, kind='File')
318
332
 
319
333
  return cls(
320
334
  name=name,
@@ -1,8 +1,8 @@
1
1
  """
2
- :mod:`etlplus.dag` module.
2
+ :mod:`etlplus.workflow.dag` module.
3
3
 
4
4
  Lightweight directed acyclic graph (DAG) helpers for ordering jobs based on
5
- ``depends_on``.
5
+ :attr:`depends_on`.
6
6
  """
7
7
 
8
8
  from __future__ import annotations
@@ -10,13 +10,15 @@ from __future__ import annotations
10
10
  from collections import deque
11
11
  from dataclasses import dataclass
12
12
 
13
- from .config.jobs import JobConfig
13
+ from .jobs import JobConfig
14
14
 
15
15
  # SECTION: EXPORTS ========================================================== #
16
16
 
17
17
 
18
18
  __all__ = [
19
+ # Errors
19
20
  'DagError',
21
+ # Functions
20
22
  'topological_sort_jobs',
21
23
  ]
22
24
 
@@ -52,7 +54,7 @@ def topological_sort_jobs(
52
54
  jobs: list[JobConfig],
53
55
  ) -> list[JobConfig]:
54
56
  """
55
- Return jobs in topological order based on ``depends_on``.
57
+ Return jobs in topological order based on :attr:`depends_on`.
56
58
 
57
59
  Parameters
58
60
  ----------
@@ -1,12 +1,12 @@
1
1
  """
2
- :mod:`etlplus.config.jobs` module.
2
+ :mod:`etlplus.workflow.jobs` module.
3
3
 
4
4
  Data classes modeling job orchestration references (extract, validate,
5
5
  transform, load).
6
6
 
7
7
  Notes
8
8
  -----
9
- - Lightweight references used inside ``PipelineConfig`` to avoid storing
9
+ - Lightweight references used inside :class:`PipelineConfig` to avoid storing
10
10
  large nested structures.
11
11
  - All attributes are simple and optional where appropriate, keeping parsing
12
12
  tolerant.
@@ -19,6 +19,7 @@ from dataclasses import field
19
19
  from typing import Any
20
20
  from typing import Self
21
21
 
22
+ from ..types import StrAnyMap
22
23
  from ..utils import coerce_dict
23
24
  from ..utils import maybe_mapping
24
25
 
@@ -26,6 +27,7 @@ from ..utils import maybe_mapping
26
27
 
27
28
 
28
29
  __all__ = [
30
+ # Data Classes
29
31
  'ExtractRef',
30
32
  'JobConfig',
31
33
  'LoadRef',
@@ -34,6 +36,75 @@ __all__ = [
34
36
  ]
35
37
 
36
38
 
39
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
40
+
41
+
42
+ def _coerce_optional_str(value: Any) -> str | None:
43
+ """
44
+ Normalize optional string values, coercing non-strings when needed.
45
+
46
+ Parameters
47
+ ----------
48
+ value : Any
49
+ Optional value to normalize.
50
+
51
+ Returns
52
+ -------
53
+ str | None
54
+ ``None`` when ``value`` is ``None``; otherwise a string value.
55
+ """
56
+ if value is None:
57
+ return None
58
+ return value if isinstance(value, str) else str(value)
59
+
60
+
61
+ def _parse_depends_on(
62
+ value: Any,
63
+ ) -> list[str]:
64
+ """
65
+ Normalize dependency declarations into a string list.
66
+
67
+ Parameters
68
+ ----------
69
+ value : Any
70
+ Input dependency specification (string or list of strings).
71
+
72
+ Returns
73
+ -------
74
+ list[str]
75
+ Normalized dependency list.
76
+ """
77
+ if isinstance(value, str):
78
+ return [value]
79
+ if isinstance(value, list):
80
+ return [entry for entry in value if isinstance(entry, str)]
81
+ return []
82
+
83
+
84
+ def _require_str(
85
+ # data: dict[str, Any],
86
+ data: StrAnyMap,
87
+ key: str,
88
+ ) -> str | None:
89
+ """
90
+ Extract a required string field from a mapping.
91
+
92
+ Parameters
93
+ ----------
94
+ data : StrAnyMap
95
+ Mapping containing the target field.
96
+ key : str
97
+ Field name to extract.
98
+
99
+ Returns
100
+ -------
101
+ str | None
102
+ The string value when present and valid; otherwise ``None``.
103
+ """
104
+ value = data.get(key)
105
+ return value if isinstance(value, str) else None
106
+
107
+
37
108
  # SECTION: DATA CLASSES ===================================================== #
38
109
 
39
110
 
@@ -62,12 +133,13 @@ class ExtractRef:
62
133
  cls,
63
134
  obj: Any,
64
135
  ) -> Self | None:
65
- """Parse a mapping into an :class:`ExtractRef` instance.
136
+ """
137
+ Parse a mapping into an :class:`ExtractRef` instance.
66
138
 
67
139
  Parameters
68
140
  ----------
69
141
  obj : Any
70
- Mapping with ``source`` and optional ``options``.
142
+ Mapping with :attr:`source` and optional :attr:`options`.
71
143
 
72
144
  Returns
73
145
  -------
@@ -77,8 +149,8 @@ class ExtractRef:
77
149
  data = maybe_mapping(obj)
78
150
  if not data:
79
151
  return None
80
- source = data.get('source')
81
- if not isinstance(source, str):
152
+ source = _require_str(data, 'source')
153
+ if source is None:
82
154
  return None
83
155
  return cls(
84
156
  source=source,
@@ -126,7 +198,8 @@ class JobConfig:
126
198
  cls,
127
199
  obj: Any,
128
200
  ) -> Self | None:
129
- """Parse a mapping into a :class:`JobConfig` instance.
201
+ """
202
+ Parse a mapping into a :class:`JobConfig` instance.
130
203
 
131
204
  Parameters
132
205
  ----------
@@ -141,22 +214,13 @@ class JobConfig:
141
214
  data = maybe_mapping(obj)
142
215
  if not data:
143
216
  return None
144
- name = data.get('name')
145
- if not isinstance(name, str):
217
+ name = _require_str(data, 'name')
218
+ if name is None:
146
219
  return None
147
220
 
148
- description = data.get('description')
149
- if description is not None and not isinstance(description, str):
150
- description = str(description)
221
+ description = _coerce_optional_str(data.get('description'))
151
222
 
152
- depends_raw = data.get('depends_on')
153
- depends_on: list[str] = []
154
- if isinstance(depends_raw, str):
155
- depends_on = [depends_raw]
156
- elif isinstance(depends_raw, list):
157
- for entry in depends_raw:
158
- if isinstance(entry, str):
159
- depends_on.append(entry)
223
+ depends_on = _parse_depends_on(data.get('depends_on'))
160
224
 
161
225
  return cls(
162
226
  name=name,
@@ -194,12 +258,13 @@ class LoadRef:
194
258
  cls,
195
259
  obj: Any,
196
260
  ) -> Self | None:
197
- """Parse a mapping into a :class:`LoadRef` instance.
261
+ """
262
+ Parse a mapping into a :class:`LoadRef` instance.
198
263
 
199
264
  Parameters
200
265
  ----------
201
266
  obj : Any
202
- Mapping with ``target`` and optional ``overrides``.
267
+ Mapping with :attr:`target` and optional :attr:`overrides`.
203
268
 
204
269
  Returns
205
270
  -------
@@ -209,8 +274,8 @@ class LoadRef:
209
274
  data = maybe_mapping(obj)
210
275
  if not data:
211
276
  return None
212
- target = data.get('target')
213
- if not isinstance(target, str):
277
+ target = _require_str(data, 'target')
278
+ if target is None:
214
279
  return None
215
280
  return cls(
216
281
  target=target,
@@ -240,12 +305,13 @@ class TransformRef:
240
305
  cls,
241
306
  obj: Any,
242
307
  ) -> Self | None:
243
- """Parse a mapping into a :class:`TransformRef` instance.
308
+ """
309
+ Parse a mapping into a :class:`TransformRef` instance.
244
310
 
245
311
  Parameters
246
312
  ----------
247
313
  obj : Any
248
- Mapping with ``pipeline``.
314
+ Mapping with :attr:`pipeline`.
249
315
 
250
316
  Returns
251
317
  -------
@@ -255,8 +321,8 @@ class TransformRef:
255
321
  data = maybe_mapping(obj)
256
322
  if not data:
257
323
  return None
258
- pipeline = data.get('pipeline')
259
- if not isinstance(pipeline, str):
324
+ pipeline = _require_str(data, 'pipeline')
325
+ if pipeline is None:
260
326
  return None
261
327
  return cls(pipeline=pipeline)
262
328
 
@@ -290,12 +356,13 @@ class ValidationRef:
290
356
  cls,
291
357
  obj: Any,
292
358
  ) -> Self | None:
293
- """Parse a mapping into a :class:`ValidationRef` instance.
359
+ """
360
+ Parse a mapping into a :class:`ValidationRef` instance.
294
361
 
295
362
  Parameters
296
363
  ----------
297
364
  obj : Any
298
- Mapping with ``ruleset`` plus optional metadata.
365
+ Mapping with :attr:`ruleset` plus optional metadata.
299
366
 
300
367
  Returns
301
368
  -------
@@ -305,15 +372,11 @@ class ValidationRef:
305
372
  data = maybe_mapping(obj)
306
373
  if not data:
307
374
  return None
308
- ruleset = data.get('ruleset')
309
- if not isinstance(ruleset, str):
375
+ ruleset = _require_str(data, 'ruleset')
376
+ if ruleset is None:
310
377
  return None
311
- severity = data.get('severity')
312
- if severity is not None and not isinstance(severity, str):
313
- severity = str(severity)
314
- phase = data.get('phase')
315
- if phase is not None and not isinstance(phase, str):
316
- phase = str(phase)
378
+ severity = _coerce_optional_str(data.get('severity'))
379
+ phase = _coerce_optional_str(data.get('phase'))
317
380
  return cls(
318
381
  ruleset=ruleset,
319
382
  severity=severity,
@@ -1,5 +1,5 @@
1
1
  """
2
- :mod:`etlplus.config.pipeline` module.
2
+ :mod:`etlplus.workflow.pipeline` module.
3
3
 
4
4
  Pipeline configuration model and helpers for job orchestration.
5
5
 
@@ -16,6 +16,7 @@ Notes
16
16
  from __future__ import annotations
17
17
 
18
18
  import os
19
+ from collections.abc import Callable
19
20
  from collections.abc import Mapping
20
21
  from dataclasses import dataclass
21
22
  from dataclasses import field
@@ -28,68 +29,86 @@ from ..file import File
28
29
  from ..file import FileFormat
29
30
  from ..types import StrAnyMap
30
31
  from ..utils import coerce_dict
32
+ from ..utils import deep_substitute
31
33
  from ..utils import maybe_mapping
32
34
  from .connector import Connector
33
35
  from .connector import parse_connector
34
36
  from .jobs import JobConfig
35
37
  from .profile import ProfileConfig
36
- from .utils import deep_substitute
37
38
 
38
39
  # SECTION: EXPORTS ========================================================== #
39
40
 
40
41
 
41
- __all__ = ['PipelineConfig', 'load_pipeline_config']
42
+ __all__ = [
43
+ # Data Classes
44
+ 'PipelineConfig',
45
+ # Functions
46
+ 'load_pipeline_config',
47
+ ]
42
48
 
43
49
 
44
- def _build_jobs(
50
+ # SECTION: INTERNAL FUNCTIONS =============================================== #
51
+
52
+
53
+ def _collect_parsed[T](
45
54
  raw: StrAnyMap,
46
- ) -> list[JobConfig]:
55
+ key: str,
56
+ parser: Callable[[Any], T | None],
57
+ ) -> list[T]:
47
58
  """
48
- Return a list of ``JobConfig`` objects parsed from the mapping.
59
+ Collect parsed items from ``raw[key]`` using a tolerant parser.
49
60
 
50
61
  Parameters
51
62
  ----------
52
63
  raw : StrAnyMap
53
64
  Raw pipeline mapping.
65
+ key : str
66
+ Key pointing to a list-like payload.
67
+ parser : Callable[[Any], T | None]
68
+ Parser that returns an instance or ``None`` for invalid entries.
54
69
 
55
70
  Returns
56
71
  -------
57
- list[JobConfig]
58
- Parsed job configurations.
72
+ list[T]
73
+ Parsed items, excluding invalid entries.
59
74
  """
60
- jobs: list[JobConfig] = []
61
- for job_raw in raw.get('jobs', []) or []:
62
- job_cfg = JobConfig.from_obj(job_raw)
63
- if job_cfg is not None:
64
- jobs.append(job_cfg)
65
-
66
- return jobs
75
+ items: list[T] = []
76
+ for entry in raw.get(key, []) or []:
77
+ parsed = parser(entry)
78
+ if parsed is not None:
79
+ items.append(parsed)
80
+ return items
67
81
 
68
82
 
69
- def _build_sources(
70
- raw: StrAnyMap,
71
- ) -> list[Connector]:
83
+ def _parse_connector_entry(
84
+ obj: Any,
85
+ ) -> Connector | None:
72
86
  """
73
- Return a list of source connectors parsed from the mapping.
87
+ Parse a connector mapping into a concrete connector instance.
74
88
 
75
89
  Parameters
76
90
  ----------
77
- raw : StrAnyMap
78
- Raw pipeline mapping.
91
+ obj : Any
92
+ Candidate connector mapping.
79
93
 
80
94
  Returns
81
95
  -------
82
- list[Connector]
83
- Parsed source connectors.
96
+ Connector | None
97
+ Parsed connector instance or ``None`` when invalid.
84
98
  """
85
- return _build_connectors(raw, 'sources')
99
+ if not (entry := maybe_mapping(obj)):
100
+ return None
101
+ try:
102
+ return parse_connector(entry)
103
+ except TypeError:
104
+ return None
86
105
 
87
106
 
88
- def _build_targets(
107
+ def _build_sources(
89
108
  raw: StrAnyMap,
90
109
  ) -> list[Connector]:
91
110
  """
92
- Return a list of target connectors parsed from the mapping.
111
+ Return a list of source connectors parsed from the mapping.
93
112
 
94
113
  Parameters
95
114
  ----------
@@ -99,43 +118,32 @@ def _build_targets(
99
118
  Returns
100
119
  -------
101
120
  list[Connector]
102
- Parsed target connectors.
121
+ Parsed source connectors.
103
122
  """
104
- return _build_connectors(raw, 'targets')
123
+ return list(
124
+ _collect_parsed(raw, 'sources', _parse_connector_entry),
125
+ )
105
126
 
106
127
 
107
- def _build_connectors(
128
+ def _build_targets(
108
129
  raw: StrAnyMap,
109
- key: str,
110
130
  ) -> list[Connector]:
111
131
  """
112
- Return parsed connectors from ``raw[key]`` using tolerant parsing.
113
-
114
- Unknown or malformed entries are skipped to preserve permissiveness.
132
+ Return a list of target connectors parsed from the mapping.
115
133
 
116
134
  Parameters
117
135
  ----------
118
136
  raw : StrAnyMap
119
137
  Raw pipeline mapping.
120
- key : str
121
- List-containing top-level key ("sources" or "targets").
122
138
 
123
139
  Returns
124
140
  -------
125
141
  list[Connector]
126
- Constructed connector instances (malformed entries skipped).
142
+ Parsed target connectors.
127
143
  """
128
- items: list[Connector] = []
129
- for obj in raw.get(key, []) or []:
130
- if not (entry := maybe_mapping(obj)):
131
- continue
132
- try:
133
- items.append(parse_connector(entry))
134
- except TypeError:
135
- # Skip unsupported types or malformed entries
136
- continue
137
-
138
- return items
144
+ return list(
145
+ _collect_parsed(raw, 'targets', _parse_connector_entry),
146
+ )
139
147
 
140
148
 
141
149
  # SECTION: FUNCTIONS ======================================================== #
@@ -156,7 +164,7 @@ def load_pipeline_config(
156
164
  return PipelineConfig.from_yaml(path, substitute=substitute, env=env)
157
165
 
158
166
 
159
- # SECTION: CLASSES ========================================================== #
167
+ # SECTION: DATA CLASSES ===================================================== #
160
168
 
161
169
 
162
170
  @dataclass(kw_only=True, slots=True)
@@ -313,7 +321,7 @@ class PipelineConfig:
313
321
  targets = _build_targets(raw)
314
322
 
315
323
  # Jobs
316
- jobs = _build_jobs(raw)
324
+ jobs = _collect_parsed(raw, 'jobs', JobConfig.from_obj)
317
325
 
318
326
  # Table schemas (optional, tolerant pass-through structures).
319
327
  table_schemas: list[dict[str, Any]] = []