unityflow-0.3.4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,676 @@
+ """Unity YAML Parser using rapidyaml.
+
+ Provides fast parsing for Unity YAML files using the rapidyaml library.
+ Includes streaming support for large files and progress callbacks.
+ """
+
+ from __future__ import annotations
+
+ import re
+ from collections.abc import Callable, Generator
+ from pathlib import Path
+ from typing import Any
+
+ import ryml
+
+ # Threshold for using streaming mode (10MB)
+ LARGE_FILE_THRESHOLD = 10 * 1024 * 1024
+
+ # Callback type for progress reporting
+ ProgressCallback = Callable[[int, int], None]  # (current, total)
+
+ # Unity YAML header pattern
+ UNITY_HEADER = """%YAML 1.1
+ %TAG !u! tag:unity3d.com,2011:
+ """
+
+ # Pattern to match Unity document headers: --- !u!{ClassID} &{fileID}
+ # Note: fileID can be negative (Unity uses 64-bit signed integers)
+ DOCUMENT_HEADER_PATTERN = re.compile(r"^--- !u!(\d+) &(-?\d+)(?: stripped)?$", re.MULTILINE)
+
+ # Pattern to match Unity GUIDs (32 hexadecimal characters)
+ # This is used to prevent GUIDs like "0000000000000000e000000000000000" from being
+ # parsed as scientific notation floats (0e000000000000000 = 0.0)
+ GUID_PATTERN = re.compile(r"^[0-9a-fA-F]{32}$")
+
+
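As a quick illustration of the two patterns above (the header line and the GUID are arbitrary example values, not taken from a real asset):

# Illustrative sketch, not part of the packaged module.
m = DOCUMENT_HEADER_PATTERN.match("--- !u!1 &919132149155446097 stripped")
assert m and m.group(1) == "1" and m.group(2) == "919132149155446097"

# Without GUID_PATTERN, an all-hex GUID containing a single 'e' could be coerced
# to a float by the scalar conversion below.
assert GUID_PATTERN.match("0000000000000000e000000000000000")
assert float("0e000000000000000") == 0.0
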
+ def _iter_children(tree: Any, node_id: int) -> list[int]:
+     """Iterate over children of a node."""
+     if not tree.has_children(node_id):
+         return []
+     children = []
+     child = tree.first_child(node_id)
+     while child != ryml.NONE:
+         children.append(child)
+         child = tree.next_sibling(child)
+     return children
+
+
+ def _to_python(tree: Any, node_id: int) -> Any:
+     """Convert rapidyaml tree node to Python object."""
+     if tree.is_map(node_id):
+         result = {}
+         for child in _iter_children(tree, node_id):
+             if tree.has_key(child):
+                 key = bytes(tree.key(child)).decode("utf-8")
+             else:
+                 key = ""
+             result[key] = _to_python(tree, child)
+         return result
+     elif tree.is_seq(node_id):
+         return [_to_python(tree, child) for child in _iter_children(tree, node_id)]
+     elif tree.has_val(node_id):
+         val_mv = tree.val(node_id)
+         if val_mv is None:
+             return None
+         val_bytes = bytes(val_mv)
+         if not val_bytes:
+             return ""
+         val = val_bytes.decode("utf-8")
+
+         # Handle YAML null values
+         if val in ("null", "~", ""):
+             return None
+
+         # Try converting to int (but preserve strings with leading zeros)
+         if val.lstrip("-").isdigit():
+             # Check for leading zeros - keep as string to preserve format
+             stripped = val.lstrip("-")
+             if len(stripped) > 1 and stripped.startswith("0"):
+                 # Has leading zeros - keep as string
+                 return val
+             try:
+                 return int(val)
+             except ValueError:
+                 pass
+
+         # Skip float conversion for GUID-like strings (32 hex chars)
+         # GUIDs like "0000000000000000e000000000000000" would otherwise be
+         # parsed as scientific notation (0e000000000000000 = 0.0)
+         if GUID_PATTERN.match(val):
+             return val
+
+         # Try converting to float
+         try:
+             return float(val)
+         except ValueError:
+             pass
+
+         # Return as string
+         return val
+     # Node has neither map, seq, nor val - treat as null
+     return None
+
+
+ def fast_parse_yaml(content: str) -> dict[str, Any]:
+     """Parse a single YAML document using rapidyaml.
+
+     Args:
+         content: YAML content string
+
+     Returns:
+         Parsed Python dictionary
+     """
+     tree = ryml.parse_in_arena(content.encode("utf-8"))
+     return _to_python(tree, tree.root_id())
+
+
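A minimal usage sketch for fast_parse_yaml; the input snippet is an arbitrary example, and the expected result follows the scalar-conversion rules in _to_python above:

doc = fast_parse_yaml("m_Name: Player\nm_IsActive: 1\nm_LocalScale: {x: 1, y: 1, z: 1}\n")
# Expected result (per the scalar rules in _to_python):
# {'m_Name': 'Player', 'm_IsActive': 1, 'm_LocalScale': {'x': 1, 'y': 1, 'z': 1}}
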
+ def fast_parse_unity_yaml(
+     content: str,
+     progress_callback: ProgressCallback | None = None,
+ ) -> list[tuple[int, int, bool, dict[str, Any]]]:
+     """Parse Unity YAML content using rapidyaml.
+
+     Args:
+         content: Unity YAML file content
+         progress_callback: Optional callback for progress reporting (current, total)
+
+     Returns:
+         List of (class_id, file_id, stripped, data) tuples
+     """
+     lines = content.split("\n")
+
+     # Find all document boundaries
+     doc_starts: list[tuple[int, int, int, bool]] = []
+
+     for i, line in enumerate(lines):
+         match = DOCUMENT_HEADER_PATTERN.match(line)
+         if match:
+             class_id = int(match.group(1))
+             file_id = int(match.group(2))
+             stripped = "stripped" in line
+             doc_starts.append((i, class_id, file_id, stripped))
+
+     if not doc_starts:
+         return []
+
+     results = []
+     total_docs = len(doc_starts)
+
+     for idx, (start_line, class_id, file_id, stripped) in enumerate(doc_starts):
+         # Report progress
+         if progress_callback:
+             progress_callback(idx, total_docs)
+
+         # Determine end of this document
+         if idx + 1 < len(doc_starts):
+             end_line = doc_starts[idx + 1][0]
+         else:
+             end_line = len(lines)
+
+         # Extract document content (skip the --- header line)
+         doc_content = "\n".join(lines[start_line + 1 : end_line])
+
+         if not doc_content.strip():
+             # Empty document
+             data = {}
+         else:
+             try:
+                 tree = ryml.parse_in_arena(doc_content.encode("utf-8"))
+                 data = _to_python(tree, tree.root_id())
+                 if not isinstance(data, dict):
+                     data = {}
+             except Exception as e:
+                 raise ValueError(
+                     f"Failed to parse document at line {start_line + 1} "
+                     f"(class_id={class_id}, file_id={file_id}): {e}"
+                 ) from e
+
+         results.append((class_id, file_id, stripped, data))
+
+     # Final progress callback
+     if progress_callback:
+         progress_callback(total_docs, total_docs)
+
+     return results
+
+
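A usage sketch for fast_parse_unity_yaml with a progress callback. The scene path is hypothetical; class ID 1 is Unity's GameObject, and each parsed document has a single root key naming the class:

import sys
from pathlib import Path

def report(current: int, total: int) -> None:
    print(f"parsed {current}/{total} documents", file=sys.stderr)

content = Path("Assets/Scenes/Main.unity").read_text(encoding="utf-8")  # hypothetical path
for class_id, file_id, stripped, data in fast_parse_unity_yaml(content, progress_callback=report):
    if class_id == 1:  # GameObject
        print(file_id, data.get("GameObject", {}).get("m_Name"))
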
+ def iter_parse_unity_yaml(
+     content: str,
+     progress_callback: ProgressCallback | None = None,
+ ) -> Generator[tuple[int, int, bool, dict[str, Any]], None, None]:
+     """Parse Unity YAML content using rapidyaml, yielding documents one at a time.
+
+     This is a memory-efficient generator version that doesn't load all documents
+     into memory at once. Useful for large files.
+
+     Args:
+         content: Unity YAML file content
+         progress_callback: Optional callback for progress reporting (current, total)
+
+     Yields:
+         Tuples of (class_id, file_id, stripped, data)
+     """
+     lines = content.split("\n")
+
+     # Find all document boundaries
+     doc_starts: list[tuple[int, int, int, bool]] = []
+
+     for i, line in enumerate(lines):
+         match = DOCUMENT_HEADER_PATTERN.match(line)
+         if match:
+             class_id = int(match.group(1))
+             file_id = int(match.group(2))
+             stripped = "stripped" in line
+             doc_starts.append((i, class_id, file_id, stripped))
+
+     if not doc_starts:
+         return
+
+     total_docs = len(doc_starts)
+
+     for idx, (start_line, class_id, file_id, stripped) in enumerate(doc_starts):
+         # Report progress
+         if progress_callback:
+             progress_callback(idx, total_docs)
+
+         # Determine end of this document
+         if idx + 1 < len(doc_starts):
+             end_line = doc_starts[idx + 1][0]
+         else:
+             end_line = len(lines)
+
+         # Extract document content (skip the --- header line)
+         doc_content = "\n".join(lines[start_line + 1 : end_line])
+
+         if not doc_content.strip():
+             # Empty document
+             data = {}
+         else:
+             try:
+                 tree = ryml.parse_in_arena(doc_content.encode("utf-8"))
+                 data = _to_python(tree, tree.root_id())
+                 if not isinstance(data, dict):
+                     data = {}
+             except Exception as e:
+                 raise ValueError(
+                     f"Failed to parse document at line {start_line + 1} "
+                     f"(class_id={class_id}, file_id={file_id}): {e}"
+                 ) from e
+
+         yield (class_id, file_id, stripped, data)
+
+     # Final progress callback
+     if progress_callback:
+         progress_callback(total_docs, total_docs)
+
+
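A sketch of the generator variant, stopping at the first MonoBehaviour (Unity class ID 114) so the remaining documents are never handed to rapidyaml; the prefab path is hypothetical:

from pathlib import Path

content = Path("Assets/Prefabs/Enemy.prefab").read_text(encoding="utf-8")  # hypothetical path
first_script = None
for class_id, file_id, stripped, data in iter_parse_unity_yaml(content):
    if class_id == 114:  # MonoBehaviour
        first_script = (file_id, data)
        break  # later documents are only header-scanned, never parsed
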
+ def stream_parse_unity_yaml_file(
+     file_path: str | Path,
+     chunk_size: int = 8 * 1024 * 1024,  # 8MB chunks
+     progress_callback: ProgressCallback | None = None,
+ ) -> Generator[tuple[int, int, bool, dict[str, Any]], None, None]:
+     """Stream parse a Unity YAML file without loading it entirely into memory.
+
+     This function is optimized for very large files (100MB+). It reads the file
+     in chunks and yields documents as they are parsed.
+
+     Args:
+         file_path: Path to the Unity YAML file
+         chunk_size: Size of chunks to read (default: 8MB)
+         progress_callback: Optional callback for progress reporting (bytes_read, total_bytes)
+
+     Yields:
+         Tuples of (class_id, file_id, stripped, data)
+     """
+     file_path = Path(file_path)
+     file_size = file_path.stat().st_size
+
+     # For smaller files, use the standard approach
+     if file_size < LARGE_FILE_THRESHOLD:
+         content = file_path.read_text(encoding="utf-8")
+         yield from iter_parse_unity_yaml(content, progress_callback)
+         return
+
+     # For large files, use streaming approach
+     buffer = ""
+     bytes_read = 0
+     pending_doc: tuple[int, int, bool, list[str]] | None = None
+
+     with open(file_path, encoding="utf-8") as f:
+         while True:
+             chunk = f.read(chunk_size)
+             if not chunk:
+                 break
+
+             bytes_read += len(chunk.encode("utf-8"))
+             buffer += chunk
+
+             # Process complete documents in the buffer
+             while True:
+                 # Find the next document header
+                 match = DOCUMENT_HEADER_PATTERN.search(buffer)
+                 if not match:
+                     break
+
+                 # If we have a pending document, finalize it
+                 if pending_doc is not None:
+                     class_id, file_id, stripped, doc_lines = pending_doc
+                     # Everything before this match belongs to the previous document
+                     doc_content = buffer[: match.start()]
+                     doc_lines.append(doc_content)
+                     full_content = "".join(doc_lines).strip()
+
+                     if full_content:
+                         try:
+                             tree = ryml.parse_in_arena(full_content.encode("utf-8"))
+                             data = _to_python(tree, tree.root_id())
+                             if not isinstance(data, dict):
+                                 data = {}
+                         except Exception:
+                             data = {}
+                     else:
+                         data = {}
+
+                     yield (class_id, file_id, stripped, data)
+
+                 # Start a new pending document
+                 class_id = int(match.group(1))
+                 file_id = int(match.group(2))
+                 stripped = "stripped" in match.group(0)
+
+                 # Move buffer past the header
+                 buffer = buffer[match.end() :]
+                 if buffer.startswith("\n"):
+                     buffer = buffer[1:]
+
+                 pending_doc = (class_id, file_id, stripped, [])
+
+             # Report progress
+             if progress_callback:
+                 progress_callback(bytes_read, file_size)
+
+     # Process the last document
+     if pending_doc is not None:
+         class_id, file_id, stripped, doc_lines = pending_doc
+         doc_lines.append(buffer)
+         full_content = "".join(doc_lines).strip()
+
+         if full_content:
+             try:
+                 tree = ryml.parse_in_arena(full_content.encode("utf-8"))
+                 data = _to_python(tree, tree.root_id())
+                 if not isinstance(data, dict):
+                     data = {}
+             except Exception:
+                 data = {}
+         else:
+             data = {}
+
+         yield (class_id, file_id, stripped, data)
+
+     # Final progress callback
+     if progress_callback:
+         progress_callback(file_size, file_size)
+
+
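A sketch of the streaming entry point on a hypothetical large scene, tallying documents per class ID. Note that for files under the 10MB threshold the callback receives document counts rather than bytes, since the function delegates to iter_parse_unity_yaml:

from collections import Counter

def on_progress(bytes_done: int, total_bytes: int) -> None:
    print(f"\r{100 * bytes_done // max(total_bytes, 1)}%", end="", flush=True)

counts: Counter[int] = Counter()
for class_id, _file_id, _stripped, _data in stream_parse_unity_yaml_file(
    "Assets/Scenes/OpenWorld.unity",  # hypothetical path
    progress_callback=on_progress,
):
    counts[class_id] += 1
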
+ def get_file_stats(file_path: str | Path) -> dict[str, Any]:
+     """Get statistics about a Unity YAML file without fully parsing it.
+
+     This is a fast operation that only scans document headers.
+
+     Args:
+         file_path: Path to the Unity YAML file
+
+     Returns:
+         Dictionary with file statistics
+     """
+     file_path = Path(file_path)
+     file_size = file_path.stat().st_size
+
+     doc_count = 0
+     class_counts: dict[int, int] = {}
+
+     with open(file_path, encoding="utf-8") as f:
+         for line in f:
+             match = DOCUMENT_HEADER_PATTERN.match(line)
+             if match:
+                 doc_count += 1
+                 class_id = int(match.group(1))
+                 class_counts[class_id] = class_counts.get(class_id, 0) + 1
+
+     return {
+         "file_size": file_size,
+         "file_size_mb": round(file_size / (1024 * 1024), 2),
+         "document_count": doc_count,
+         "class_counts": class_counts,
+         "is_large_file": file_size >= LARGE_FILE_THRESHOLD,
+     }
+
+
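A sketch showing how the stats could be used to pick a parsing strategy; the path and the example result values are illustrative:

from pathlib import Path

stats = get_file_stats("Assets/Scenes/Main.unity")  # hypothetical path
# Example result shape (values are illustrative):
# {'file_size': 5242880, 'file_size_mb': 5.0, 'document_count': 1342,
#  'class_counts': {1: 410, 4: 410, 114: 322}, 'is_large_file': False}
if stats["is_large_file"]:
    docs = stream_parse_unity_yaml_file("Assets/Scenes/Main.unity")
else:
    content = Path("Assets/Scenes/Main.unity").read_text(encoding="utf-8")
    docs = iter_parse_unity_yaml(content)
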
+ def fast_dump_unity_object(data: dict[str, Any]) -> str:
+     """Dump a Unity YAML object to string using fast serialization.
+
+     This produces Unity-compatible YAML output with proper formatting.
+     """
+     lines: list[str] = []
+     _dump_dict(data, lines, indent=0)
+     return "\n".join(lines)
+
+
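A small round-trip sketch for the dumper; the expected output assumes the flow-style heuristic in _is_flow_dict and the two-space indent unit used below:

component = {
    "Transform": {
        "m_GameObject": {"fileID": 1234567890},
        "m_LocalPosition": {"x": 0, "y": 1.5, "z": 0},
        "m_Children": [],
    }
}
print(fast_dump_unity_object(component))
# Expected output:
# Transform:
#   m_GameObject: {fileID: 1234567890}
#   m_LocalPosition: {x: 0, y: 1.5, z: 0}
#   m_Children: []
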
+ def _dump_dict(data: dict[str, Any], lines: list[str], indent: int) -> None:
+     """Dump a dictionary to YAML lines."""
+     prefix = "  " * indent
+
+     for key, value in data.items():
+         if isinstance(value, dict):
+             if not value:
+                 lines.append(f"{prefix}{key}: {{}}")
+             elif _is_flow_dict(value):
+                 flow = _to_flow(value)
+                 lines.append(f"{prefix}{key}: {flow}")
+             else:
+                 lines.append(f"{prefix}{key}:")
+                 _dump_dict(value, lines, indent + 1)
+         elif isinstance(value, list):
+             if not value:
+                 lines.append(f"{prefix}{key}: []")
+             else:
+                 lines.append(f"{prefix}{key}:")
+                 _dump_list(value, lines, indent)
+         else:
+             scalar = _format_scalar(value)
+             if scalar:
+                 lines.append(f"{prefix}{key}: {scalar}")
+             else:
+                 # Empty value - no space after colon
+                 lines.append(f"{prefix}{key}:")
+
+
+ def _dump_list(data: list[Any], lines: list[str], indent: int) -> None:
+     """Dump a list to YAML lines."""
+     prefix = "  " * indent
+
+     for item in data:
+         if isinstance(item, dict):
+             if _is_flow_dict(item):
+                 flow = _to_flow(item)
+                 lines.append(f"{prefix}- {flow}")
+             else:
+                 # Block style dict in list
+                 keys = list(item.keys())
+                 if keys:
+                     first_key = keys[0]
+                     first_val = item[first_key]
+                     if isinstance(first_val, dict) and _is_flow_dict(first_val):
+                         lines.append(f"{prefix}- {first_key}: {_to_flow(first_val)}")
+                     elif isinstance(first_val, (dict, list)) and first_val:
+                         lines.append(f"{prefix}- {first_key}:")
+                         if isinstance(first_val, dict):
+                             _dump_dict(first_val, lines, indent + 2)
+                         else:
+                             _dump_list(first_val, lines, indent + 1)
+                     else:
+                         scalar = _format_scalar(first_val)
+                         if scalar:
+                             lines.append(f"{prefix}- {first_key}: {scalar}")
+                         else:
+                             lines.append(f"{prefix}- {first_key}:")
+
+                     # Rest of keys
+                     for key in keys[1:]:
+                         val = item[key]
+                         inner_prefix = "  " * (indent + 1)
+                         if isinstance(val, dict):
+                             if not val:
+                                 lines.append(f"{inner_prefix}{key}: {{}}")
+                             elif _is_flow_dict(val):
+                                 lines.append(f"{inner_prefix}{key}: {_to_flow(val)}")
+                             else:
+                                 lines.append(f"{inner_prefix}{key}:")
+                                 _dump_dict(val, lines, indent + 2)
+                         elif isinstance(val, list):
+                             if not val:
+                                 lines.append(f"{inner_prefix}{key}: []")
+                             else:
+                                 lines.append(f"{inner_prefix}{key}:")
+                                 _dump_list(val, lines, indent + 1)
+                         else:
+                             scalar = _format_scalar(val)
+                             if scalar:
+                                 lines.append(f"{inner_prefix}{key}: {scalar}")
+                             else:
+                                 lines.append(f"{inner_prefix}{key}:")
+                 else:
+                     lines.append(f"{prefix}- {{}}")
+         elif isinstance(item, list):
+             lines.append(f"{prefix}-")
+             _dump_list(item, lines, indent + 1)
+         else:
+             lines.append(f"{prefix}- {_format_scalar(item)}")
+
+
+ def _is_flow_dict(d: dict) -> bool:
+     """Check if a dict should be rendered in flow style.
+
+     Unity uses flow style for simple references like {fileID: 123}.
+     """
+     if not d:
+         return True
+     keys = set(d.keys())
+     # Flow style for Unity references
+     if keys <= {"fileID", "guid", "type"}:
+         return True
+     # Flow style for simple vectors (x, y, z, w)
+     if keys <= {"x", "y", "z", "w"} and all(isinstance(v, (int, float)) for v in d.values()):
+         return True
+     # Flow style for colors (r, g, b, a)
+     if keys <= {"r", "g", "b", "a"} and all(isinstance(v, (int, float)) for v in d.values()):
+         return True
+     return False
+
+
+ def _to_flow(d: dict) -> str:
+     """Convert a dict to flow style."""
+     parts = []
+     for k, v in d.items():
+         parts.append(f"{k}: {_format_scalar(v)}")
+     return "{" + ", ".join(parts) + "}"
+
+
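A few illustrative checks of the flow-style heuristic (values are arbitrary examples):

assert _is_flow_dict({"fileID": 400000, "guid": "0123456789abcdef0123456789abcdef", "type": 3})
assert _is_flow_dict({"x": 0.0, "y": 0.5, "z": 1.0})
assert not _is_flow_dict({"m_Name": "Player"})
# _to_flow({"r": 1, "g": 1, "b": 1, "a": 1}) -> '{r: 1, g: 1, b: 1, a: 1}'
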
+ def _format_scalar(value: Any) -> str:
+     """Format a scalar value for YAML output."""
+     if value is None:
+         return ""
+     if isinstance(value, bool):
+         return "1" if value else "0"
+     if isinstance(value, int):
+         return str(value)
+     if isinstance(value, float):
+         # Preserve decimal point for floats (0.0 stays as "0.0", not "0")
+         return str(value)
+     if isinstance(value, str):
+         # Empty string - no value after colon
+         if not value:
+             return ""
+         if value in ("true", "false", "null", "yes", "no", "on", "off", "True", "False"):
+             return f"'{value}'"
+         # Standalone '-' or '~' are interpreted as null in YAML - must quote them
+         if value in ("-", "~"):
+             return f"'{value}'"
+         # Check for special characters that require quoting
+         # Note: [] don't require quoting when not at start
+         needs_quote = False
+         if value.startswith(("[", "{", "*", "&", "!", "|", ">", "'", '"', "%", "@", "`")):
+             needs_quote = True
+         elif any(c in value for c in ":\n#"):
+             needs_quote = True
+         elif value.startswith("- ") or value.startswith("? ") or value.startswith("-\t"):
+             needs_quote = True
+
+         if needs_quote:
+             # Use single quotes, escape internal quotes
+             escaped = value.replace("'", "''")
+             return f"'{escaped}'"
+         # Check if it looks like a number (but not strings with leading zeros)
+         if not (value.lstrip("-").startswith("0") and len(value.lstrip("-")) > 1):
+             try:
+                 float(value)
+                 return f"'{value}'"
+             except ValueError:
+                 pass
+         return value
+     return str(value)
+
+
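Illustrative checks of the quoting rules (example values only):

assert _format_scalar(True) == "1"          # Unity writes booleans as 0/1
assert _format_scalar(None) == ""           # empty value after the colon
assert _format_scalar("true") == "'true'"   # would otherwise re-parse as a boolean
assert _format_scalar("3.14") == "'3.14'"   # numeric-looking strings stay strings
assert _format_scalar("007") == "007"       # leading zeros are left unquoted
assert _format_scalar("a: b") == "'a: b'"   # ':' forces quoting
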
+ def iter_dump_unity_object(data: dict[str, Any]) -> Generator[str, None, None]:
+     """Dump a Unity YAML object, yielding lines one at a time.
+
+     This is a memory-efficient generator version for large objects.
+
+     Args:
+         data: Dictionary to dump
+
+     Yields:
+         YAML lines as strings
+     """
+     yield from _iter_dump_dict(data, indent=0)
+
+
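A sketch of incremental serialization to disk; the output path and the class/file IDs in the document header are hypothetical, and the header line follows the '--- !u!{ClassID} &{fileID}' convention documented above:

with open("out.unity", "w", encoding="utf-8", newline="\n") as f:
    f.write(UNITY_HEADER)
    f.write("--- !u!1 &1000000000000000001\n")  # example class/file IDs
    for line in iter_dump_unity_object({"GameObject": {"m_Name": "Player"}}):
        f.write(line + "\n")
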
+ def _iter_dump_dict(data: dict[str, Any], indent: int) -> Generator[str, None, None]:
+     """Dump a dictionary to YAML lines, yielding each line."""
+     prefix = "  " * indent
+
+     for key, value in data.items():
+         if isinstance(value, dict):
+             if not value:
+                 yield f"{prefix}{key}: {{}}"
+             elif _is_flow_dict(value):
+                 flow = _to_flow(value)
+                 yield f"{prefix}{key}: {flow}"
+             else:
+                 yield f"{prefix}{key}:"
+                 yield from _iter_dump_dict(value, indent + 1)
+         elif isinstance(value, list):
+             if not value:
+                 yield f"{prefix}{key}: []"
+             else:
+                 yield f"{prefix}{key}:"
+                 yield from _iter_dump_list(value, indent)
+         else:
+             scalar = _format_scalar(value)
+             if scalar:
+                 yield f"{prefix}{key}: {scalar}"
+             else:
+                 yield f"{prefix}{key}:"
+
+
+ def _iter_dump_list(data: list[Any], indent: int) -> Generator[str, None, None]:
+     """Dump a list to YAML lines, yielding each line."""
+     prefix = "  " * indent
+
+     for item in data:
+         if isinstance(item, dict):
+             if _is_flow_dict(item):
+                 flow = _to_flow(item)
+                 yield f"{prefix}- {flow}"
+             else:
+                 keys = list(item.keys())
+                 if keys:
+                     first_key = keys[0]
+                     first_val = item[first_key]
+                     if isinstance(first_val, dict) and _is_flow_dict(first_val):
+                         yield f"{prefix}- {first_key}: {_to_flow(first_val)}"
+                     elif isinstance(first_val, (dict, list)) and first_val:
+                         yield f"{prefix}- {first_key}:"
+                         if isinstance(first_val, dict):
+                             yield from _iter_dump_dict(first_val, indent + 2)
+                         else:
+                             yield from _iter_dump_list(first_val, indent + 1)
+                     else:
+                         scalar = _format_scalar(first_val)
+                         if scalar:
+                             yield f"{prefix}- {first_key}: {scalar}"
+                         else:
+                             yield f"{prefix}- {first_key}:"
+
+                     for key in keys[1:]:
+                         val = item[key]
+                         inner_prefix = "  " * (indent + 1)
+                         if isinstance(val, dict):
+                             if not val:
+                                 yield f"{inner_prefix}{key}: {{}}"
+                             elif _is_flow_dict(val):
+                                 yield f"{inner_prefix}{key}: {_to_flow(val)}"
+                             else:
+                                 yield f"{inner_prefix}{key}:"
+                                 yield from _iter_dump_dict(val, indent + 2)
+                         elif isinstance(val, list):
+                             if not val:
+                                 yield f"{inner_prefix}{key}: []"
+                             else:
+                                 yield f"{inner_prefix}{key}:"
+                                 yield from _iter_dump_list(val, indent + 1)
+                         else:
+                             scalar = _format_scalar(val)
+                             if scalar:
+                                 yield f"{inner_prefix}{key}: {scalar}"
+                             else:
+                                 yield f"{inner_prefix}{key}:"
+                 else:
+                     yield f"{prefix}- {{}}"
+         elif isinstance(item, list):
+             yield f"{prefix}-"
+             yield from _iter_dump_list(item, indent + 1)
+         else:
+             yield f"{prefix}- {_format_scalar(item)}"