behave-toolkit 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,61 @@
1
+ """Public package interface for behave-toolkit."""
2
+
3
+ from .config import (
4
+ LoggerSpec,
5
+ LoggingConfig,
6
+ ObjectSpec,
7
+ ParserConfig,
8
+ ParserTypeSpec,
9
+ ToolkitConfig,
10
+ load_config,
11
+ load_yaml_file,
12
+ load_yaml_text,
13
+ )
14
+ from .cycles import expand_scenario_cycles, format_cycle_progress, get_cycle_progress
15
+ from .errors import ConfigError, DocumentationError, IntegrationError, ToolkitError
16
+ from .logging_support import configure_test_logging
17
+ from .parsers import configure_parsers
18
+ from .plugin import (
19
+ LifecycleManager,
20
+ activate_feature_scope,
21
+ activate_global_scope,
22
+ activate_scenario_scope,
23
+ activate_scope,
24
+ configure_loggers,
25
+ install,
26
+ )
27
+ from .scopes import Scope
28
+ from .step_docs import DocumentationResult, generate_step_docs
29
+
30
# Public names re-exported by behave_toolkit.  Kept in a single
# case-insensitive sorted order (the original mixed two separately sorted
# runs) so every new export has exactly one obvious insertion point.
__all__ = [
    "activate_feature_scope",
    "activate_global_scope",
    "activate_scenario_scope",
    "activate_scope",
    "ConfigError",
    "configure_loggers",
    "configure_parsers",
    "configure_test_logging",
    "DocumentationError",
    "DocumentationResult",
    "expand_scenario_cycles",
    "format_cycle_progress",
    "generate_step_docs",
    "get_cycle_progress",
    "install",
    "IntegrationError",
    "LifecycleManager",
    "load_config",
    "load_yaml_file",
    "load_yaml_text",
    "LoggerSpec",
    "LoggingConfig",
    "ObjectSpec",
    "ParserConfig",
    "ParserTypeSpec",
    "Scope",
    "ToolkitConfig",
    "ToolkitError",
]

# Package version; keep in sync with the distribution metadata.
__version__ = "0.1.0"
@@ -0,0 +1,606 @@
1
+ from __future__ import annotations
2
+
3
+ from dataclasses import dataclass, field
4
+ import logging
5
+ from pathlib import Path
6
+ from typing import Any, Mapping
7
+
8
+ import yaml
9
+
10
+ from .errors import ConfigError
11
+ from .scopes import Scope
12
+
13
# Matcher names accepted for custom parser types (Behave's parse/cfparse).
PARSER_TYPE_MATCHERS = frozenset(("parse", "cfparse"))

# Supported strategies for resolving enum members from matched text.
ENUM_LOOKUPS = frozenset(("name", "value"))
15
+
16
+
17
@dataclass(frozen=True, slots=True)
class ObjectSpec:
    """Configuration for one named object in the toolkit container.

    Records how to build the object (``factory``/``args``/``kwargs``), when to
    rebuild it (``scope``), how to tear it down (``cleanup``), and which
    attribute name exposes it on the Behave context (``inject_as``).
    """

    name: str
    factory: str
    scope: Scope = Scope.SCENARIO
    args: tuple[Any, ...] = ()
    kwargs: dict[str, Any] = field(default_factory=dict)
    cleanup: str | None = None
    inject_as: str | None = None

    @property
    def context_name(self) -> str:
        """Context attribute name: ``inject_as`` when set, else ``name``."""
        alias = self.inject_as
        return alias if alias else self.name
32
+
33
+
34
+ @dataclass(frozen=True, slots=True)
35
+ # pylint: disable=too-many-instance-attributes
36
+ class ParserTypeSpec:
37
+ """Configuration for one registered Behave custom type."""
38
+
39
+ name: str
40
+ converter: str | None = None
41
+ enum: str | None = None
42
+ pattern: str | None = None
43
+ regex_group_count: int | None = None
44
+ matcher: str | None = None
45
+ case_sensitive: bool = True
46
+ lookup: str = "value"
47
+
48
+
49
+ @dataclass(frozen=True, slots=True)
50
+ class ParserConfig:
51
+ """Configuration for parser/type registration helpers."""
52
+
53
+ step_matcher: str | None = None
54
+ types: dict[str, ParserTypeSpec] = field(default_factory=dict)
55
+
56
+
57
+ @dataclass(frozen=True, slots=True)
58
+ class LoggerSpec:
59
+ """Configuration for one named test logger."""
60
+
61
+ name: str
62
+ path: Any
63
+ level: int | str = "INFO"
64
+ logger_name: str | None = None
65
+ console: bool = True
66
+ mode: str = "w"
67
+ inject_as: str | None = None
68
+
69
+ @property
70
+ def context_name(self) -> str:
71
+ return self.inject_as or self.name
72
+
73
+ @property
74
+ def scope(self) -> Scope:
75
+ return Scope.GLOBAL
76
+
77
+ @property
78
+ def effective_logger_name(self) -> str:
79
+ return self.logger_name or self.name
80
+
81
+
82
+ @dataclass(frozen=True, slots=True)
83
+ class LoggingConfig:
84
+ """Configuration for named test loggers."""
85
+
86
+ loggers: dict[str, LoggerSpec] = field(default_factory=dict)
87
+
88
+
89
+ @dataclass(frozen=True, slots=True)
90
+ class ToolkitConfig:
91
+ """Normalized project configuration."""
92
+
93
+ version: int
94
+ variables: dict[str, Any] = field(default_factory=dict)
95
+ objects: dict[str, ObjectSpec] = field(default_factory=dict)
96
+ parsers: ParserConfig = field(default_factory=ParserConfig)
97
+ logging: LoggingConfig = field(default_factory=LoggingConfig)
98
+
99
+ def require(self, name: str) -> ObjectSpec:
100
+ try:
101
+ return self.objects[name]
102
+ except KeyError as exc:
103
+ known = ", ".join(sorted(self.objects)) or "<none>"
104
+ raise KeyError(f"Unknown object '{name}'. Known objects: {known}") from exc
105
+
106
+ def require_variable(self, name: str) -> Any:
107
+ try:
108
+ return self.variables[name]
109
+ except KeyError as exc:
110
+ known = ", ".join(sorted(self.variables)) or "<none>"
111
+ raise KeyError(f"Unknown variable '{name}'. Known variables: {known}") from exc
112
+
113
+
114
def load_yaml_text(text: str) -> ToolkitConfig:
    """Parse *text* as YAML and normalize it into a :class:`ToolkitConfig`."""
    mapping = _load_yaml_mapping(text, source="from text")
    return load_config(mapping)
118
+
119
+
120
def load_yaml_file(path: str | Path) -> ToolkitConfig:
    """Load toolkit configuration from a YAML file or config directory.

    A directory is merged file-by-file; a regular file is parsed directly.
    Any :class:`ConfigError` raised below is re-wrapped so the message always
    names the resolved path (unless it already does).
    """
    config_path = Path(path).expanduser().resolve()
    try:
        if config_path.is_dir():
            raw = _load_yaml_directory(config_path)
        else:
            raw = _load_yaml_file_mapping(config_path)
        return load_config(raw)
    except ConfigError as exc:
        message = str(exc)
        if str(config_path) in message:
            # Already points at the offending path; don't double-wrap.
            raise
        raise ConfigError(
            f"Invalid behave-toolkit config '{config_path}': {message}"
        ) from exc
135
+
136
+
137
def _load_yaml_mapping(text: str, *, source: str) -> Mapping[str, Any]:
    """Parse *text* with ``yaml.safe_load`` and require a mapping (or empty) root.

    *source* is a human-readable location used only in error messages.
    """
    try:
        parsed = yaml.safe_load(text)
    except yaml.YAMLError as exc:
        raise ConfigError(
            f"Could not parse behave-toolkit config {source}: {exc}"
        ) from exc

    if parsed is None:
        # An empty document means an empty configuration.
        return {}
    if isinstance(parsed, Mapping):
        return parsed
    raise ConfigError(
        f"The YAML root in behave-toolkit config {source} must be a mapping."
    )
152
+
153
+
154
def _load_yaml_file_mapping(path: Path) -> Mapping[str, Any]:
    """Read *path* as UTF-8 and parse it into a mapping.

    Read failures are reported as :class:`ConfigError` with the path included.
    """
    try:
        raw_text = path.read_text(encoding="utf-8")
    except OSError as exc:
        raise ConfigError(
            f"Could not read behave-toolkit config '{path}': {exc}"
        ) from exc
    return _load_yaml_mapping(raw_text, source=f"at '{path}'")
162
+
163
+
164
def _load_yaml_directory(directory: Path) -> Mapping[str, Any]:
    """Merge every ``*.yaml``/``*.yml`` file under *directory* into one raw config.

    Files are visited in a deterministic order (relative path, compared
    case-insensitively).  Each top-level section is merged key-by-key, and a
    ConfigError is raised when two files define the same key, or when
    'version' appears in multiple files with different values.
    """

    # Deterministic processing order: recursive glob for both extensions,
    # sorted by lower-cased path relative to the config root.
    yaml_files = sorted(
        [
            *directory.rglob("*.yaml"),
            *directory.rglob("*.yml"),
        ],
        key=lambda path: str(path.relative_to(directory)).lower(),
    )
    if not yaml_files:
        raise ConfigError(
            f"Could not find any YAML files in behave-toolkit config directory "
            f"'{directory}'."
        )

    # Skeleton of the merged raw config; sections are filled in file order.
    merged: dict[str, Any] = {
        "version": 1,
        "variables": {},
        "objects": {},
        "parsers": {"types": {}},
        "logging": {},
    }
    # Track which file first defined each key so duplicates can be reported
    # with both locations.
    version_source: Path | None = None
    variable_sources: dict[str, Path] = {}
    object_sources: dict[str, Path] = {}
    parser_type_sources: dict[str, Path] = {}
    logger_sources: dict[str, Path] = {}
    parser_step_matcher_source: Path | None = None

    for yaml_file in yaml_files:
        raw = _load_yaml_file_mapping(yaml_file)
        if "version" in raw:
            version = raw["version"]
            if version_source is None:
                merged["version"] = version
                version_source = yaml_file
            elif merged["version"] != version:
                # Repeating the same version is tolerated; only conflicting
                # values are an error.
                raise ConfigError(
                    "Invalid behave-toolkit config directory "
                    f"'{directory}': 'version' is defined more than once with "
                    f"different values in '{version_source}' and '{yaml_file}'."
                )

        _merge_named_section(
            merged_section=merged["variables"],
            raw_root=raw,
            section_name="variables",
            file_path=yaml_file,
            root_path=directory,
            sources=variable_sources,
        )
        _merge_named_section(
            merged_section=merged["objects"],
            raw_root=raw,
            section_name="objects",
            file_path=yaml_file,
            root_path=directory,
            sources=object_sources,
        )
        _merge_parser_section(
            merged_parsers=merged["parsers"],
            raw_root=raw,
            file_path=yaml_file,
            root_path=directory,
            parser_type_sources=parser_type_sources,
            parser_step_matcher_source=parser_step_matcher_source,
        )
        # _merge_parser_section cannot rebind this local from inside the
        # helper, so the step_matcher source file is recorded here, after the
        # merge (and its duplicate check) has succeeded.
        if "parsers" in raw and isinstance(raw.get("parsers"), Mapping):
            step_matcher = raw["parsers"].get("step_matcher")
            if step_matcher is not None:
                parser_step_matcher_source = yaml_file
        _merge_named_section(
            merged_section=merged["logging"],
            raw_root=raw,
            section_name="logging",
            file_path=yaml_file,
            root_path=directory,
            sources=logger_sources,
        )

    return merged
244
+
245
+
246
def _merge_named_section(  # pylint: disable=too-many-arguments
    *,
    merged_section: dict[str, Any],
    raw_root: Mapping[str, Any],
    section_name: str,
    file_path: Path,
    root_path: Path,
    sources: dict[str, Path],
) -> None:
    """Merge one file's top-level *section_name* mapping into *merged_section*.

    *sources* remembers which file first defined each key; defining the same
    key in two files raises a :class:`ConfigError` naming both files.
    """
    raw_section = raw_root.get(section_name)
    if raw_section is None:
        # This file does not contribute to the section.
        return
    if not isinstance(raw_section, Mapping):
        raise ConfigError(
            "Invalid behave-toolkit config directory "
            f"'{root_path}': Section '{section_name}' in '{file_path}' must be a "
            "mapping."
        )

    for raw_name, value in raw_section.items():
        key = str(raw_name)
        first_seen = sources.get(key)
        if first_seen is not None:
            raise ConfigError(
                "Invalid behave-toolkit config directory "
                f"'{root_path}': Section '{section_name}' defines '{key}' "
                f"more than once in '{first_seen}' and '{file_path}'."
            )
        merged_section[key] = value
        sources[key] = file_path
276
+
277
+
278
def _merge_parser_section(  # pylint: disable=too-many-arguments
    *,
    merged_parsers: dict[str, Any],
    raw_root: Mapping[str, Any],
    file_path: Path,
    root_path: Path,
    parser_type_sources: dict[str, Path],
    parser_step_matcher_source: Path | None,
) -> None:
    """Merge one file's 'parsers' section into *merged_parsers* in place.

    Raises ConfigError when the section is malformed, when 'step_matcher'
    was already set by an earlier file (per *parser_step_matcher_source*),
    or when a name in 'parsers.types' is defined twice.  NOTE(review): this
    function only reads *parser_step_matcher_source*; rebinding it here would
    not propagate, so the caller updates its own tracking variable after this
    returns.
    """
    raw_parsers = raw_root.get("parsers")
    if raw_parsers is None:
        # This file has no 'parsers' section — nothing to merge.
        return
    if not isinstance(raw_parsers, Mapping):
        raise ConfigError(
            "Invalid behave-toolkit config directory "
            f"'{root_path}': Section 'parsers' in '{file_path}' must be a mapping."
        )

    step_matcher = raw_parsers.get("step_matcher")
    if step_matcher is not None:
        if parser_step_matcher_source is not None:
            # A previous file already set the matcher: conflicting config.
            raise ConfigError(
                "Invalid behave-toolkit config directory "
                f"'{root_path}': 'parsers.step_matcher' is defined more than once "
                f"in '{parser_step_matcher_source}' and '{file_path}'."
            )
        merged_parsers["step_matcher"] = step_matcher

    raw_parser_types = raw_parsers.get("types")
    if raw_parser_types is None:
        return
    if not isinstance(raw_parser_types, Mapping):
        raise ConfigError(
            "Invalid behave-toolkit config directory "
            f"'{root_path}': Section 'parsers.types' in '{file_path}' must be a "
            "mapping."
        )

    # Merge custom type definitions, rejecting duplicates across files.
    merged_types = merged_parsers["types"]
    for name, value in raw_parser_types.items():
        normalized_name = str(name)
        previous_source = parser_type_sources.get(normalized_name)
        if previous_source is not None:
            raise ConfigError(
                "Invalid behave-toolkit config directory "
                f"'{root_path}': Section 'parsers.types' defines "
                f"'{normalized_name}' more than once in '{previous_source}' and "
                f"'{file_path}'."
            )
        merged_types[normalized_name] = value
        parser_type_sources[normalized_name] = file_path
329
+
330
+
331
def load_config(raw: Mapping[str, Any]) -> ToolkitConfig:
    """Normalize validated config data into dataclasses.

    Every top-level section must be a mapping (missing sections default to
    empty); malformed sections raise :class:`ConfigError`.
    """
    version = raw.get("version", 1)
    if not isinstance(version, int):
        raise ConfigError("The config 'version' must be an integer.")

    # Validate each section in a fixed order so error precedence matches the
    # documented layout: variables, objects, parsers, logging.
    sections: dict[str, Mapping[str, Any]] = {}
    for section_name in ("variables", "objects", "parsers", "logging"):
        section = raw.get(section_name, {})
        if not isinstance(section, Mapping):
            raise ConfigError(
                f"The config '{section_name}' section must be a mapping."
            )
        sections[section_name] = section

    return ToolkitConfig(
        version=version,
        variables=dict(sections["variables"]),
        objects=_load_object_specs(sections["objects"]),
        parsers=_load_parser_config(sections["parsers"]),
        logging=_load_logging_config(sections["logging"]),
    )
361
+
362
+
363
def _load_object_specs(raw_objects: Mapping[str, Any]) -> dict[str, ObjectSpec]:
    """Validate the 'objects' section and build :class:`ObjectSpec` entries."""
    specs: dict[str, ObjectSpec] = {}
    for name, definition in raw_objects.items():
        if not isinstance(definition, Mapping):
            raise ConfigError(f"Object '{name}' must be defined as a mapping.")

        # 'factory' is the only mandatory field.
        factory = definition.get("factory")
        if not isinstance(factory, str) or not factory.strip():
            raise ConfigError(
                f"Object '{name}' must define a non-empty 'factory' string."
            )

        raw_args = definition.get("args", [])
        raw_kwargs = definition.get("kwargs", {})
        if not isinstance(raw_args, list):
            raise ConfigError(f"Object '{name}' field 'args' must be a list.")
        if not isinstance(raw_kwargs, Mapping):
            raise ConfigError(f"Object '{name}' field 'kwargs' must be a mapping.")

        cleanup = definition.get("cleanup")
        if cleanup is not None and not isinstance(cleanup, str):
            raise ConfigError(
                f"Object '{name}' field 'cleanup' must be a string if provided."
            )
        inject_as = definition.get("inject_as")
        if inject_as is not None and not isinstance(inject_as, str):
            raise ConfigError(
                f"Object '{name}' field 'inject_as' must be a string if provided."
            )

        key = str(name)
        specs[key] = ObjectSpec(
            name=key,
            factory=factory.strip(),
            scope=Scope.parse(definition.get("scope", Scope.SCENARIO.value)),
            args=tuple(raw_args),
            kwargs=dict(raw_kwargs),
            cleanup=cleanup,
            inject_as=inject_as,
        )
    return specs
404
+
405
+
406
def _load_parser_config(raw_parsers: Mapping[str, Any]) -> ParserConfig:
    """Validate the 'parsers' section and build a :class:`ParserConfig`."""
    step_matcher = raw_parsers.get("step_matcher")
    if step_matcher is not None:
        if not isinstance(step_matcher, str) or not step_matcher.strip():
            raise ConfigError(
                "The config 'parsers.step_matcher' field must be a non-empty "
                "string if provided."
            )
        step_matcher = step_matcher.strip()

    raw_parser_types = raw_parsers.get("types", {})
    if not isinstance(raw_parser_types, Mapping):
        raise ConfigError("The config 'parsers.types' section must be a mapping.")

    parser_types: dict[str, ParserTypeSpec] = {}
    for raw_name, raw_definition in raw_parser_types.items():
        type_name = str(raw_name)
        parser_types[type_name] = _load_parser_type_spec(type_name, raw_definition)
    return ParserConfig(step_matcher=step_matcher, types=parser_types)
425
+
426
+
427
def _load_logging_config(raw_logging: Mapping[str, Any]) -> LoggingConfig:
    """Validate the 'logging' section and build a :class:`LoggingConfig`."""
    loggers: dict[str, LoggerSpec] = {}
    for raw_name, raw_definition in raw_logging.items():
        logger_name = str(raw_name)
        loggers[logger_name] = _load_logger_spec(logger_name, raw_definition)
    return LoggingConfig(loggers=loggers)
433
+
434
+
435
def _load_logger_spec(logger_name: str, raw_definition: Any) -> LoggerSpec:
    """Validate one entry of the 'logging' section and build a LoggerSpec.

    Raises ConfigError for any malformed field; error precedence follows the
    order of the checks below (mapping, path, logger_name, inject_as, mode,
    console, level).
    """
    if not isinstance(raw_definition, Mapping):
        raise ConfigError(f"Logger '{logger_name}' must be defined as a mapping.")

    # 'path' is mandatory; non-string values (e.g. Path objects) are allowed,
    # but a string must not be blank.
    path = raw_definition.get("path")
    if path is None:
        raise ConfigError(f"Logger '{logger_name}' must define a 'path'.")
    if isinstance(path, str) and not path.strip():
        raise ConfigError(f"Logger '{logger_name}' field 'path' must not be empty.")

    logger_target_name = _normalized_optional_string(
        raw_definition,
        "logger_name",
        type_name=logger_name,
        label="Logger",
    )
    inject_as = _normalized_optional_string(
        raw_definition,
        "inject_as",
        type_name=logger_name,
        label="Logger",
    )
    mode = _normalized_optional_string(
        raw_definition,
        "mode",
        type_name=logger_name,
        label="Logger",
    )
    if mode is None:
        # Default file mode: truncate on each run.
        mode = "w"

    console = raw_definition.get("console", True)
    if not isinstance(console, bool):
        raise ConfigError(f"Logger '{logger_name}' field 'console' must be a bool.")

    level = raw_definition.get("level", "INFO")
    if not isinstance(level, (int, str)) or (isinstance(level, str) and not level.strip()):
        raise ConfigError(
            f"Logger '{logger_name}' field 'level' must be an integer or non-empty string."
        )
    if isinstance(level, str):
        level = level.strip()
        # Validate the name against stdlib logging's level attributes
        # (e.g. "debug" -> logging.DEBUG).  Note the stripped string — not the
        # resolved int — is what gets stored on the spec.
        candidate = getattr(logging, level.upper(), None)
        if not isinstance(candidate, int):
            raise ConfigError(
                f"Logger '{logger_name}' field 'level' uses unsupported value '{level}'."
            )

    return LoggerSpec(
        name=logger_name,
        path=path,
        level=level,
        logger_name=logger_target_name,
        console=console,
        mode=mode,
        inject_as=inject_as,
    )
492
+
493
+
494
def _load_parser_type_spec(type_name: str, raw_definition: Any) -> ParserTypeSpec:
    """Validate one entry of 'parsers.types' and build a ParserTypeSpec.

    Accepts either a bare converter string or a full mapping (see
    _normalize_parser_type_definition).  Exactly one of 'converter' or 'enum'
    must be provided; 'case_sensitive' and 'lookup' may only be set for
    enum-backed types.
    """
    parser_definition = _normalize_parser_type_definition(type_name, raw_definition)

    converter = _normalized_optional_string(
        parser_definition,
        "converter",
        type_name=type_name,
    )
    enum = _normalized_optional_string(
        parser_definition,
        "enum",
        type_name=type_name,
    )
    # Exactly one value source: bool(x) == bool(y) catches both "neither"
    # and "both" (normalized strings are never empty, so truthiness is safe).
    if bool(converter) == bool(enum):
        raise ConfigError(
            f"Parser type '{type_name}' must define exactly one of "
            "'converter' or 'enum'."
        )

    pattern = _normalized_optional_string(
        parser_definition,
        "pattern",
        type_name=type_name,
    )

    regex_group_count = parser_definition.get("regex_group_count")
    if regex_group_count is not None and (
        not isinstance(regex_group_count, int) or regex_group_count < 0
    ):
        raise ConfigError(
            f"Parser type '{type_name}' field 'regex_group_count' must be "
            "a non-negative integer if provided."
        )

    matcher = _normalized_optional_string(
        parser_definition,
        "matcher",
        type_name=type_name,
    )
    if matcher is not None and matcher not in PARSER_TYPE_MATCHERS:
        supported_matchers = ", ".join(sorted(PARSER_TYPE_MATCHERS))
        raise ConfigError(
            f"Parser type '{type_name}' field 'matcher' must be one of "
            f"{supported_matchers}."
        )

    case_sensitive = parser_definition.get("case_sensitive", True)
    if not isinstance(case_sensitive, bool):
        raise ConfigError(
            f"Parser type '{type_name}' field 'case_sensitive' must be a bool."
        )

    lookup = parser_definition.get("lookup", "value")
    if not isinstance(lookup, str) or lookup not in ENUM_LOOKUPS:
        supported_lookups = ", ".join(sorted(ENUM_LOOKUPS))
        raise ConfigError(
            f"Parser type '{type_name}' field 'lookup' must be one of "
            f"{supported_lookups}."
        )

    # The enum-only options are rejected for converter-backed types; by this
    # point exactly one of converter/enum is set, so 'converter is not None'
    # implies a non-enum type.
    if converter is not None and "case_sensitive" in parser_definition:
        raise ConfigError(
            f"Parser type '{type_name}' cannot set 'case_sensitive' unless "
            "it uses 'enum'."
        )
    if converter is not None and "lookup" in parser_definition:
        raise ConfigError(
            f"Parser type '{type_name}' cannot set 'lookup' unless it uses "
            "'enum'."
        )

    return ParserTypeSpec(
        name=type_name,
        converter=converter,
        enum=enum,
        pattern=pattern,
        regex_group_count=regex_group_count,
        matcher=matcher,
        case_sensitive=case_sensitive,
        lookup=lookup,
    )
575
+
576
+
577
def _normalize_parser_type_definition(
    type_name: str,
    raw_definition: Any,
) -> Mapping[str, Any]:
    """Accept either shorthand (bare converter string) or a full mapping.

    A string ``"pkg.mod:fn"`` is expanded to ``{"converter": ...}``; anything
    that is neither a string nor a mapping raises :class:`ConfigError`.
    """
    if isinstance(raw_definition, Mapping):
        return raw_definition
    if isinstance(raw_definition, str):
        return {"converter": raw_definition}
    raise ConfigError(
        f"Parser type '{type_name}' must be defined as a mapping or converter "
        "string."
    )
589
+
590
+
591
def _normalized_optional_string(
    definition: Mapping[str, Any],
    field_name: str,
    *,
    type_name: str,
    label: str = "Parser type",
) -> str | None:
    """Fetch optional *field_name*, returning the stripped string or ``None``.

    Raises :class:`ConfigError` when the value is present but is not a string
    or is blank after stripping.  *label* and *type_name* identify the owning
    entry in the error message.
    """
    value = definition.get(field_name)
    if value is None:
        return None
    if isinstance(value, str):
        stripped = value.strip()
        if stripped:
            return stripped
    raise ConfigError(
        f"{label} '{type_name}' field '{field_name}' must be a non-empty "
        "string if provided."
    )