unityflow 0.3.4__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries, as they appear in their respective public registries. It is provided for informational purposes only.
- unityflow/__init__.py +167 -0
- unityflow/asset_resolver.py +636 -0
- unityflow/asset_tracker.py +1687 -0
- unityflow/cli.py +2317 -0
- unityflow/data/__init__.py +1 -0
- unityflow/data/class_ids.json +336 -0
- unityflow/diff.py +234 -0
- unityflow/fast_parser.py +676 -0
- unityflow/formats.py +1558 -0
- unityflow/git_utils.py +307 -0
- unityflow/hierarchy.py +1672 -0
- unityflow/merge.py +226 -0
- unityflow/meta_generator.py +1291 -0
- unityflow/normalizer.py +529 -0
- unityflow/parser.py +698 -0
- unityflow/query.py +406 -0
- unityflow/script_parser.py +717 -0
- unityflow/sprite.py +378 -0
- unityflow/validator.py +783 -0
- unityflow-0.3.4.dist-info/METADATA +293 -0
- unityflow-0.3.4.dist-info/RECORD +25 -0
- unityflow-0.3.4.dist-info/WHEEL +5 -0
- unityflow-0.3.4.dist-info/entry_points.txt +2 -0
- unityflow-0.3.4.dist-info/licenses/LICENSE +21 -0
- unityflow-0.3.4.dist-info/top_level.txt +1 -0
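
For orientation, the CLI module shown below is a thin wrapper over the library modules listed above. A minimal sketch of equivalent programmatic use follows; it assumes only the calls the CLI itself makes (UnityPrefabNormalizer(...).normalize_file() and PrefabValidator(...).validate_file()), and the prefab path is hypothetical.

# Sketch of programmatic use, mirroring what the CLI below does.
from pathlib import Path

from unityflow.normalizer import UnityPrefabNormalizer
from unityflow.validator import PrefabValidator

prefab = Path("Assets/Prefabs/Player.prefab")  # hypothetical path

# Normalize in place with the CLI's defaults (--precision 6, no hex floats).
normalizer = UnityPrefabNormalizer(use_hex_floats=False, float_precision=6, project_root=None)
content = normalizer.normalize_file(prefab)
prefab.write_text(content, encoding="utf-8", newline="\n")

# Validate and report, as `unityflow validate` does.
result = PrefabValidator(strict=False).validate_file(prefab)
if not result.is_valid:
    for issue in result.issues:
        print(issue)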
unityflow/cli.py
ADDED
|
@@ -0,0 +1,2317 @@
|
|
|
1
|
+
"""Command-line interface for unityflow.
|
|
2
|
+
|
|
3
|
+
Provides commands for normalizing, diffing, and validating Unity YAML files.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import sys
|
|
9
|
+
from collections.abc import Callable
|
|
10
|
+
from concurrent.futures import ProcessPoolExecutor, as_completed
|
|
11
|
+
from pathlib import Path
|
|
12
|
+
|
|
13
|
+
import click
|
|
14
|
+
|
|
15
|
+
from unityflow import __version__
|
|
16
|
+
from unityflow.asset_tracker import (
|
|
17
|
+
find_unity_project_root,
|
|
18
|
+
)
|
|
19
|
+
from unityflow.diff import DiffFormat, PrefabDiff
|
|
20
|
+
from unityflow.git_utils import (
|
|
21
|
+
get_changed_files,
|
|
22
|
+
get_files_changed_since,
|
|
23
|
+
get_repo_root,
|
|
24
|
+
is_git_repository,
|
|
25
|
+
)
|
|
26
|
+
from unityflow.normalizer import UnityPrefabNormalizer
|
|
27
|
+
from unityflow.parser import UnityYAMLDocument
|
|
28
|
+
from unityflow.validator import PrefabValidator
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
def _normalize_single_file(args: tuple) -> tuple[Path, bool, str]:
|
|
32
|
+
"""Normalize a single file (for parallel processing).
|
|
33
|
+
|
|
34
|
+
Args:
|
|
35
|
+
args: Tuple of (file_path, normalizer_kwargs)
|
|
36
|
+
|
|
37
|
+
Returns:
|
|
38
|
+
Tuple of (file_path, success, message)
|
|
39
|
+
"""
|
|
40
|
+
file_path, kwargs = args
|
|
41
|
+
try:
|
|
42
|
+
normalizer = UnityPrefabNormalizer(**kwargs)
|
|
43
|
+
content = normalizer.normalize_file(file_path)
|
|
44
|
+
file_path.write_text(content, encoding="utf-8", newline="\n")
|
|
45
|
+
return (file_path, True, "")
|
|
46
|
+
except Exception as e:
|
|
47
|
+
return (file_path, False, str(e))
|
|
48
|
+
|
|
49
|
+
|
|
50
|
+
def create_progress_bar(
|
|
51
|
+
total: int,
|
|
52
|
+
label: str = "Processing",
|
|
53
|
+
show_eta: bool = True,
|
|
54
|
+
) -> tuple[Callable[[int, int], None], Callable[[], None]]:
|
|
55
|
+
"""Create a progress bar and return update/close callbacks.
|
|
56
|
+
|
|
57
|
+
Args:
|
|
58
|
+
total: Total number of items
|
|
59
|
+
label: Progress bar label
|
|
60
|
+
show_eta: Whether to show ETA
|
|
61
|
+
|
|
62
|
+
Returns:
|
|
63
|
+
Tuple of (update_callback, close_callback)
|
|
64
|
+
"""
|
|
65
|
+
bar = click.progressbar(
|
|
66
|
+
length=total,
|
|
67
|
+
label=label,
|
|
68
|
+
show_eta=show_eta,
|
|
69
|
+
show_percent=True,
|
|
70
|
+
)
|
|
71
|
+
bar.__enter__()
|
|
72
|
+
|
|
73
|
+
def update(current: int, total: int) -> None:
|
|
74
|
+
bar.update(1)
|
|
75
|
+
|
|
76
|
+
def close() -> None:
|
|
77
|
+
bar.__exit__(None, None, None)
|
|
78
|
+
|
|
79
|
+
return update, close
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
@click.group()
|
|
83
|
+
@click.version_option(version=__version__, prog_name="unityflow")
|
|
84
|
+
def main() -> None:
|
|
85
|
+
"""Unity YAML Deterministic Serializer.
|
|
86
|
+
|
|
87
|
+
A tool for canonical serialization of Unity YAML files (.prefab, .unity,
|
|
88
|
+
.asset, etc.) to eliminate non-deterministic changes and reduce VCS noise.
|
|
89
|
+
"""
|
|
90
|
+
pass
|
|
91
|
+
|
|
92
|
+
|
|
93
|
+
@main.command()
|
|
94
|
+
@click.argument("input_files", nargs=-1, type=click.Path(exists=True, path_type=Path))
|
|
95
|
+
@click.option(
|
|
96
|
+
"-o",
|
|
97
|
+
"--output",
|
|
98
|
+
type=click.Path(path_type=Path),
|
|
99
|
+
help="Output file path (only for single file, default: overwrite input)",
|
|
100
|
+
)
|
|
101
|
+
@click.option(
|
|
102
|
+
"--stdout",
|
|
103
|
+
is_flag=True,
|
|
104
|
+
help="Write to stdout instead of file (only for single file)",
|
|
105
|
+
)
|
|
106
|
+
@click.option(
|
|
107
|
+
"--changed-only",
|
|
108
|
+
is_flag=True,
|
|
109
|
+
help="Normalize only files changed in git working tree",
|
|
110
|
+
)
|
|
111
|
+
@click.option(
|
|
112
|
+
"--staged-only",
|
|
113
|
+
is_flag=True,
|
|
114
|
+
help="Normalize only staged files (use with --changed-only)",
|
|
115
|
+
)
|
|
116
|
+
@click.option(
|
|
117
|
+
"--since",
|
|
118
|
+
"since_ref",
|
|
119
|
+
type=str,
|
|
120
|
+
help="Normalize files changed since git reference (e.g., HEAD~5, main, v1.0)",
|
|
121
|
+
)
|
|
122
|
+
@click.option(
|
|
123
|
+
"--pattern",
|
|
124
|
+
type=str,
|
|
125
|
+
help="Filter files by glob pattern (e.g., 'Assets/Prefabs/**/*.prefab')",
|
|
126
|
+
)
|
|
127
|
+
@click.option(
|
|
128
|
+
"--dry-run",
|
|
129
|
+
is_flag=True,
|
|
130
|
+
help="Show files that would be normalized without making changes",
|
|
131
|
+
)
|
|
132
|
+
@click.option(
|
|
133
|
+
"--hex-floats",
|
|
134
|
+
is_flag=True,
|
|
135
|
+
help="Use IEEE 754 hex format for floats (lossless)",
|
|
136
|
+
)
|
|
137
|
+
@click.option(
|
|
138
|
+
"--precision",
|
|
139
|
+
type=int,
|
|
140
|
+
default=6,
|
|
141
|
+
help="Decimal precision for float normalization (default: 6)",
|
|
142
|
+
)
|
|
143
|
+
@click.option(
|
|
144
|
+
"--format",
|
|
145
|
+
"output_format",
|
|
146
|
+
type=click.Choice(["yaml", "json"]),
|
|
147
|
+
default="yaml",
|
|
148
|
+
help="Output format (default: yaml)",
|
|
149
|
+
)
|
|
150
|
+
@click.option(
|
|
151
|
+
"--progress",
|
|
152
|
+
is_flag=True,
|
|
153
|
+
help="Show progress bar for batch processing",
|
|
154
|
+
)
|
|
155
|
+
@click.option(
|
|
156
|
+
"--parallel",
|
|
157
|
+
"-j",
|
|
158
|
+
"parallel_jobs",
|
|
159
|
+
type=int,
|
|
160
|
+
default=1,
|
|
161
|
+
help="Number of parallel jobs for batch processing (default: 1)",
|
|
162
|
+
)
|
|
163
|
+
@click.option(
|
|
164
|
+
"--in-place",
|
|
165
|
+
is_flag=True,
|
|
166
|
+
help="Modify files in place (same as not specifying -o)",
|
|
167
|
+
)
|
|
168
|
+
@click.option(
|
|
169
|
+
"--project-root",
|
|
170
|
+
type=click.Path(exists=True, path_type=Path),
|
|
171
|
+
help="Unity project root for script resolution (auto-detected if not specified)",
|
|
172
|
+
)
|
|
173
|
+
def normalize(
|
|
174
|
+
input_files: tuple[Path, ...],
|
|
175
|
+
output: Path | None,
|
|
176
|
+
stdout: bool,
|
|
177
|
+
changed_only: bool,
|
|
178
|
+
staged_only: bool,
|
|
179
|
+
since_ref: str | None,
|
|
180
|
+
pattern: str | None,
|
|
181
|
+
dry_run: bool,
|
|
182
|
+
hex_floats: bool,
|
|
183
|
+
precision: int,
|
|
184
|
+
output_format: str,
|
|
185
|
+
progress: bool,
|
|
186
|
+
parallel_jobs: int,
|
|
187
|
+
in_place: bool,
|
|
188
|
+
project_root: Path | None,
|
|
189
|
+
) -> None:
|
|
190
|
+
"""Normalize Unity YAML files for deterministic serialization.
|
|
191
|
+
|
|
192
|
+
INPUT_FILES are paths to .prefab, .unity, .asset, or other Unity YAML files.
|
|
193
|
+
|
|
194
|
+
Examples:
|
|
195
|
+
|
|
196
|
+
# Normalize in place
|
|
197
|
+
unityflow normalize Player.prefab
|
|
198
|
+
unityflow normalize MainScene.unity
|
|
199
|
+
unityflow normalize GameConfig.asset
|
|
200
|
+
|
|
201
|
+
# Normalize multiple files
|
|
202
|
+
unityflow normalize *.prefab *.unity *.asset
|
|
203
|
+
|
|
204
|
+
# Normalize to a new file
|
|
205
|
+
unityflow normalize Player.prefab -o Player.normalized.prefab
|
|
206
|
+
|
|
207
|
+
# Output to stdout
|
|
208
|
+
unityflow normalize Player.prefab --stdout
|
|
209
|
+
|
|
210
|
+
Incremental normalization (requires git):
|
|
211
|
+
|
|
212
|
+
# Normalize changed files only
|
|
213
|
+
unityflow normalize --changed-only
|
|
214
|
+
|
|
215
|
+
# Normalize staged files only
|
|
216
|
+
unityflow normalize --changed-only --staged-only
|
|
217
|
+
|
|
218
|
+
# Normalize files changed since a commit
|
|
219
|
+
unityflow normalize --since HEAD~5
|
|
220
|
+
|
|
221
|
+
# Normalize files changed since a branch
|
|
222
|
+
unityflow normalize --since main
|
|
223
|
+
|
|
224
|
+
# Filter by pattern
|
|
225
|
+
unityflow normalize --changed-only --pattern "Assets/**/*.unity"
|
|
226
|
+
|
|
227
|
+
# Dry run to see what would be normalized
|
|
228
|
+
unityflow normalize --changed-only --dry-run
|
|
229
|
+
|
|
230
|
+
Script-based field sync (auto-enabled when project root is found):
|
|
231
|
+
|
|
232
|
+
# With explicit project root for script resolution
|
|
233
|
+
unityflow normalize Player.prefab --project-root /path/to/unity/project
|
|
234
|
+
"""
|
|
235
|
+
# Collect files to normalize
|
|
236
|
+
files_to_normalize: list[Path] = []
|
|
237
|
+
|
|
238
|
+
# Git-based file selection
|
|
239
|
+
if changed_only or since_ref:
|
|
240
|
+
if not is_git_repository():
|
|
241
|
+
click.echo("Error: Not in a git repository", err=True)
|
|
242
|
+
sys.exit(1)
|
|
243
|
+
|
|
244
|
+
if changed_only:
|
|
245
|
+
files_to_normalize = get_changed_files(
|
|
246
|
+
staged_only=staged_only,
|
|
247
|
+
include_untracked=not staged_only,
|
|
248
|
+
)
|
|
249
|
+
elif since_ref:
|
|
250
|
+
files_to_normalize = get_files_changed_since(since_ref)
|
|
251
|
+
|
|
252
|
+
# Apply pattern filter (use PurePath.match for glob-style patterns)
|
|
253
|
+
if pattern and files_to_normalize:
|
|
254
|
+
repo_root = get_repo_root()
|
|
255
|
+
filtered = []
|
|
256
|
+
for f in files_to_normalize:
|
|
257
|
+
try:
|
|
258
|
+
rel_path = f.relative_to(repo_root) if repo_root else f
|
|
259
|
+
# PurePath.match supports ** glob patterns
|
|
260
|
+
if rel_path.match(pattern):
|
|
261
|
+
filtered.append(f)
|
|
262
|
+
except ValueError:
|
|
263
|
+
pass
|
|
264
|
+
files_to_normalize = filtered
|
|
265
|
+
|
|
266
|
+
# Explicit file arguments
|
|
267
|
+
if input_files:
|
|
268
|
+
explicit_files = list(input_files)
|
|
269
|
+
# Apply pattern filter to explicit files too
|
|
270
|
+
if pattern:
|
|
271
|
+
explicit_files = [f for f in explicit_files if f.match(pattern)]
|
|
272
|
+
files_to_normalize.extend(explicit_files)
|
|
273
|
+
|
|
274
|
+
# No files to process
|
|
275
|
+
if not files_to_normalize:
|
|
276
|
+
if changed_only:
|
|
277
|
+
click.echo("No changed Unity files found")
|
|
278
|
+
elif since_ref:
|
|
279
|
+
click.echo(f"No changed Unity files since {since_ref}")
|
|
280
|
+
else:
|
|
281
|
+
click.echo("Error: No input files specified", err=True)
|
|
282
|
+
click.echo("Use --changed-only, --since, or provide file paths", err=True)
|
|
283
|
+
sys.exit(1)
|
|
284
|
+
return
|
|
285
|
+
|
|
286
|
+
# Remove duplicates and sort
|
|
287
|
+
files_to_normalize = sorted(set(files_to_normalize))
|
|
288
|
+
|
|
289
|
+
# Dry run mode
|
|
290
|
+
if dry_run:
|
|
291
|
+
click.echo(f"Would normalize {len(files_to_normalize)} file(s):")
|
|
292
|
+
for f in files_to_normalize:
|
|
293
|
+
click.echo(f" {f}")
|
|
294
|
+
return
|
|
295
|
+
|
|
296
|
+
# Validate options for batch mode
|
|
297
|
+
if len(files_to_normalize) > 1:
|
|
298
|
+
if output:
|
|
299
|
+
click.echo("Error: --output cannot be used with multiple files", err=True)
|
|
300
|
+
sys.exit(1)
|
|
301
|
+
if stdout:
|
|
302
|
+
click.echo("Error: --stdout cannot be used with multiple files", err=True)
|
|
303
|
+
sys.exit(1)
|
|
304
|
+
|
|
305
|
+
if output_format == "json":
|
|
306
|
+
click.echo("Error: JSON format not yet implemented", err=True)
|
|
307
|
+
sys.exit(1)
|
|
308
|
+
|
|
309
|
+
normalizer_kwargs = {
|
|
310
|
+
"use_hex_floats": hex_floats,
|
|
311
|
+
"float_precision": precision,
|
|
312
|
+
"project_root": project_root,
|
|
313
|
+
}
|
|
314
|
+
|
|
315
|
+
normalizer = UnityPrefabNormalizer(**normalizer_kwargs)
|
|
316
|
+
|
|
317
|
+
# Process files
|
|
318
|
+
success_count = 0
|
|
319
|
+
error_count = 0
|
|
320
|
+
|
|
321
|
+
# Parallel processing for batch mode
|
|
322
|
+
if parallel_jobs > 1 and len(files_to_normalize) > 1 and not stdout and not output:
|
|
323
|
+
file_count = len(files_to_normalize)
|
|
324
|
+
click.echo(f"Processing {file_count} files with {parallel_jobs} parallel workers...")
|
|
325
|
+
|
|
326
|
+
tasks = [(f, normalizer_kwargs) for f in files_to_normalize]
|
|
327
|
+
|
|
328
|
+
with ProcessPoolExecutor(max_workers=parallel_jobs) as executor:
|
|
329
|
+
futures = {executor.submit(_normalize_single_file, task): task[0] for task in tasks}
|
|
330
|
+
|
|
331
|
+
if progress:
|
|
332
|
+
with click.progressbar(
|
|
333
|
+
length=len(files_to_normalize),
|
|
334
|
+
label="Normalizing",
|
|
335
|
+
show_eta=True,
|
|
336
|
+
show_percent=True,
|
|
337
|
+
) as bar:
|
|
338
|
+
for future in as_completed(futures):
|
|
339
|
+
file_path, success, error_msg = future.result()
|
|
340
|
+
if success:
|
|
341
|
+
success_count += 1
|
|
342
|
+
else:
|
|
343
|
+
error_count += 1
|
|
344
|
+
click.echo(f"\nError: {file_path}: {error_msg}", err=True)
|
|
345
|
+
bar.update(1)
|
|
346
|
+
else:
|
|
347
|
+
for future in as_completed(futures):
|
|
348
|
+
file_path, success, error_msg = future.result()
|
|
349
|
+
if success:
|
|
350
|
+
success_count += 1
|
|
351
|
+
click.echo(f"Normalized: {file_path}")
|
|
352
|
+
else:
|
|
353
|
+
error_count += 1
|
|
354
|
+
click.echo(f"Error: {file_path}: {error_msg}", err=True)
|
|
355
|
+
|
|
356
|
+
# Sequential processing
|
|
357
|
+
else:
|
|
358
|
+
if progress and len(files_to_normalize) > 1:
|
|
359
|
+
files_iter = click.progressbar(
|
|
360
|
+
files_to_normalize,
|
|
361
|
+
label="Normalizing",
|
|
362
|
+
show_eta=True,
|
|
363
|
+
show_percent=True,
|
|
364
|
+
)
|
|
365
|
+
else:
|
|
366
|
+
files_iter = files_to_normalize
|
|
367
|
+
|
|
368
|
+
for input_file in files_iter:
|
|
369
|
+
try:
|
|
370
|
+
content = normalizer.normalize_file(input_file)
|
|
371
|
+
|
|
372
|
+
if stdout:
|
|
373
|
+
click.echo(content, nl=False)
|
|
374
|
+
elif output:
|
|
375
|
+
output.write_text(content, encoding="utf-8", newline="\n")
|
|
376
|
+
if not progress:
|
|
377
|
+
click.echo(f"Normalized: {input_file} -> {output}")
|
|
378
|
+
else:
|
|
379
|
+
input_file.write_text(content, encoding="utf-8", newline="\n")
|
|
380
|
+
if not progress:
|
|
381
|
+
click.echo(f"Normalized: {input_file}")
|
|
382
|
+
|
|
383
|
+
success_count += 1
|
|
384
|
+
|
|
385
|
+
except Exception as e:
|
|
386
|
+
if progress:
|
|
387
|
+
click.echo(f"\nError: Failed to normalize {input_file}: {e}", err=True)
|
|
388
|
+
else:
|
|
389
|
+
click.echo(f"Error: Failed to normalize {input_file}: {e}", err=True)
|
|
390
|
+
error_count += 1
|
|
391
|
+
|
|
392
|
+
# Summary for batch mode
|
|
393
|
+
if len(files_to_normalize) > 1:
|
|
394
|
+
click.echo()
|
|
395
|
+
click.echo(f"Completed: {success_count} normalized, {error_count} failed")
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
@main.command()
|
|
399
|
+
@click.argument("old_file", type=click.Path(exists=True, path_type=Path))
|
|
400
|
+
@click.argument("new_file", type=click.Path(exists=True, path_type=Path))
|
|
401
|
+
@click.option(
|
|
402
|
+
"--no-normalize",
|
|
403
|
+
is_flag=True,
|
|
404
|
+
help="Don't normalize files before diffing",
|
|
405
|
+
)
|
|
406
|
+
@click.option(
|
|
407
|
+
"--context",
|
|
408
|
+
"-C",
|
|
409
|
+
type=int,
|
|
410
|
+
default=3,
|
|
411
|
+
help="Number of context lines (default: 3)",
|
|
412
|
+
)
|
|
413
|
+
@click.option(
|
|
414
|
+
"--format",
|
|
415
|
+
"diff_format",
|
|
416
|
+
type=click.Choice(["unified", "context", "summary"]),
|
|
417
|
+
default="unified",
|
|
418
|
+
help="Diff output format (default: unified)",
|
|
419
|
+
)
|
|
420
|
+
@click.option(
|
|
421
|
+
"--exit-code",
|
|
422
|
+
is_flag=True,
|
|
423
|
+
help="Exit with 1 if files differ, 0 if identical",
|
|
424
|
+
)
|
|
425
|
+
def diff(
|
|
426
|
+
old_file: Path,
|
|
427
|
+
new_file: Path,
|
|
428
|
+
no_normalize: bool,
|
|
429
|
+
context: int,
|
|
430
|
+
diff_format: str,
|
|
431
|
+
exit_code: bool,
|
|
432
|
+
) -> None:
|
|
433
|
+
"""Show differences between two Unity YAML files.
|
|
434
|
+
|
|
435
|
+
Normalizes both files before comparison to eliminate noise
|
|
436
|
+
from Unity's non-deterministic serialization.
|
|
437
|
+
|
|
438
|
+
Examples:
|
|
439
|
+
|
|
440
|
+
# Compare two prefabs
|
|
441
|
+
unityflow diff old.prefab new.prefab
|
|
442
|
+
|
|
443
|
+
# Show raw diff without normalization
|
|
444
|
+
unityflow diff old.prefab new.prefab --no-normalize
|
|
445
|
+
|
|
446
|
+
# Exit with status code (for scripts)
|
|
447
|
+
unityflow diff old.prefab new.prefab --exit-code
|
|
448
|
+
"""
|
|
449
|
+
format_map = {
|
|
450
|
+
"unified": DiffFormat.UNIFIED,
|
|
451
|
+
"context": DiffFormat.CONTEXT,
|
|
452
|
+
"summary": DiffFormat.SUMMARY,
|
|
453
|
+
}
|
|
454
|
+
|
|
455
|
+
differ = PrefabDiff(
|
|
456
|
+
normalize=not no_normalize,
|
|
457
|
+
context_lines=context,
|
|
458
|
+
format=format_map[diff_format],
|
|
459
|
+
)
|
|
460
|
+
|
|
461
|
+
try:
|
|
462
|
+
result = differ.diff_files(old_file, new_file)
|
|
463
|
+
except Exception as e:
|
|
464
|
+
click.echo(f"Error: Failed to diff files: {e}", err=True)
|
|
465
|
+
sys.exit(1)
|
|
466
|
+
|
|
467
|
+
if result.has_changes:
|
|
468
|
+
click.echo("\n".join(result.diff_lines))
|
|
469
|
+
if exit_code:
|
|
470
|
+
sys.exit(1)
|
|
471
|
+
else:
|
|
472
|
+
click.echo("Files are identical (after normalization)")
|
|
473
|
+
if exit_code:
|
|
474
|
+
sys.exit(0)
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
@main.command()
|
|
478
|
+
@click.argument("files", nargs=-1, type=click.Path(exists=True, path_type=Path), required=True)
|
|
479
|
+
@click.option(
|
|
480
|
+
"--strict",
|
|
481
|
+
is_flag=True,
|
|
482
|
+
help="Treat warnings as errors",
|
|
483
|
+
)
|
|
484
|
+
@click.option(
|
|
485
|
+
"--format",
|
|
486
|
+
"output_format",
|
|
487
|
+
type=click.Choice(["text", "json"]),
|
|
488
|
+
default="text",
|
|
489
|
+
help="Output format (default: text)",
|
|
490
|
+
)
|
|
491
|
+
@click.option(
|
|
492
|
+
"--quiet",
|
|
493
|
+
"-q",
|
|
494
|
+
is_flag=True,
|
|
495
|
+
help="Only output errors, suppress info and warnings",
|
|
496
|
+
)
|
|
497
|
+
def validate(
|
|
498
|
+
files: tuple[Path, ...],
|
|
499
|
+
strict: bool,
|
|
500
|
+
output_format: str,
|
|
501
|
+
quiet: bool,
|
|
502
|
+
) -> None:
|
|
503
|
+
"""Validate Unity YAML files for structural correctness.
|
|
504
|
+
|
|
505
|
+
Checks for:
|
|
506
|
+
- Valid YAML structure
|
|
507
|
+
- Duplicate fileIDs
|
|
508
|
+
- Missing required fields
|
|
509
|
+
- Broken internal references
|
|
510
|
+
|
|
511
|
+
Examples:
|
|
512
|
+
|
|
513
|
+
# Validate a single file
|
|
514
|
+
unityflow validate Player.prefab
|
|
515
|
+
unityflow validate MainScene.unity
|
|
516
|
+
unityflow validate GameConfig.asset
|
|
517
|
+
|
|
518
|
+
# Validate multiple files
|
|
519
|
+
unityflow validate *.prefab *.unity *.asset
|
|
520
|
+
|
|
521
|
+
# Strict validation (warnings are errors)
|
|
522
|
+
unityflow validate Player.prefab --strict
|
|
523
|
+
"""
|
|
524
|
+
validator = PrefabValidator(strict=strict)
|
|
525
|
+
any_invalid = False
|
|
526
|
+
|
|
527
|
+
for file in files:
|
|
528
|
+
result = validator.validate_file(file)
|
|
529
|
+
|
|
530
|
+
if not result.is_valid:
|
|
531
|
+
any_invalid = True
|
|
532
|
+
|
|
533
|
+
if output_format == "json":
|
|
534
|
+
import json
|
|
535
|
+
|
|
536
|
+
output = {
|
|
537
|
+
"path": str(file),
|
|
538
|
+
"valid": result.is_valid,
|
|
539
|
+
"issues": [
|
|
540
|
+
{
|
|
541
|
+
"severity": i.severity.value,
|
|
542
|
+
"message": i.message,
|
|
543
|
+
"fileID": i.file_id,
|
|
544
|
+
"propertyPath": i.property_path,
|
|
545
|
+
"suggestion": i.suggestion,
|
|
546
|
+
}
|
|
547
|
+
for i in result.issues
|
|
548
|
+
],
|
|
549
|
+
}
|
|
550
|
+
click.echo(json.dumps(output, indent=2))
|
|
551
|
+
else:
|
|
552
|
+
if quiet:
|
|
553
|
+
if result.errors:
|
|
554
|
+
click.echo(f"{file}: INVALID")
|
|
555
|
+
for issue in result.errors:
|
|
556
|
+
click.echo(f" {issue}")
|
|
557
|
+
else:
|
|
558
|
+
click.echo(result)
|
|
559
|
+
click.echo()
|
|
560
|
+
|
|
561
|
+
if any_invalid:
|
|
562
|
+
sys.exit(1)
|
|
563
|
+
|
|
564
|
+
|
|
565
|
+
# ============================================================================
|
|
566
|
+
# Component GUID Mappings
|
|
567
|
+
# ============================================================================
|
|
568
|
+
|
|
569
|
+
PACKAGE_COMPONENT_GUIDS: dict[str, str] = {
|
|
570
|
+
# Unity UI (com.unity.ugui)
|
|
571
|
+
"Image": "fe87c0e1cc204ed48ad3b37840f39efc",
|
|
572
|
+
"Button": "4e29b1a8efbd4b44bb3f3716e73f07ff",
|
|
573
|
+
"ScrollRect": "1aa08ab6e0800fa44ae55d278d1423e3",
|
|
574
|
+
"Mask": "31a19414c41e5ae4aae2af33fee712f6",
|
|
575
|
+
"RectMask2D": "3312d7739989d2b4e91e6319e9a96d76",
|
|
576
|
+
"GraphicRaycaster": "dc42784cf147c0c48a680349fa168899",
|
|
577
|
+
"CanvasScaler": "0cd44c1031e13a943bb63640046fad76",
|
|
578
|
+
"VerticalLayoutGroup": "59f8146938fff824cb5fd77236b75775",
|
|
579
|
+
"HorizontalLayoutGroup": "30649d3a9faa99c48a7b1166b86bf2a0",
|
|
580
|
+
"ContentSizeFitter": "3245ec927659c4140ac4f8d17403cc18",
|
|
581
|
+
"TextMeshProUGUI": "f4688fdb7df04437aeb418b961361dc5",
|
|
582
|
+
"TMP_InputField": "2da0c512f12947e489f739169773d7ca",
|
|
583
|
+
"EventSystem": "76c392e42b5098c458856cdf6ecaaaa1",
|
|
584
|
+
"InputSystemUIInputModule": "01614664b831546d2ae94a42149d80ac",
|
|
585
|
+
# URP 2D Lighting
|
|
586
|
+
"Light2D": "073797afb82c5a1438f328866b10b3f0",
|
|
587
|
+
}
|
|
588
|
+
|
|
589
|
+
# Built-in component types (native Unity components)
|
|
590
|
+
BUILTIN_COMPONENT_TYPES = [
|
|
591
|
+
# Renderer
|
|
592
|
+
"SpriteRenderer",
|
|
593
|
+
"MeshRenderer",
|
|
594
|
+
"TrailRenderer",
|
|
595
|
+
"LineRenderer",
|
|
596
|
+
"SkinnedMeshRenderer",
|
|
597
|
+
# Camera & Light
|
|
598
|
+
"Camera",
|
|
599
|
+
"Light",
|
|
600
|
+
# Audio
|
|
601
|
+
"AudioSource",
|
|
602
|
+
"AudioListener",
|
|
603
|
+
# 3D Colliders
|
|
604
|
+
"BoxCollider",
|
|
605
|
+
"SphereCollider",
|
|
606
|
+
"CapsuleCollider",
|
|
607
|
+
"MeshCollider",
|
|
608
|
+
# 2D Colliders
|
|
609
|
+
"BoxCollider2D",
|
|
610
|
+
"CircleCollider2D",
|
|
611
|
+
"PolygonCollider2D",
|
|
612
|
+
"EdgeCollider2D",
|
|
613
|
+
"CapsuleCollider2D",
|
|
614
|
+
"CompositeCollider2D",
|
|
615
|
+
# Physics
|
|
616
|
+
"Rigidbody",
|
|
617
|
+
"Rigidbody2D",
|
|
618
|
+
"CharacterController",
|
|
619
|
+
# Animation
|
|
620
|
+
"Animator",
|
|
621
|
+
"Animation",
|
|
622
|
+
# UI
|
|
623
|
+
"Canvas",
|
|
624
|
+
"CanvasGroup",
|
|
625
|
+
"CanvasRenderer",
|
|
626
|
+
# Misc
|
|
627
|
+
"MeshFilter",
|
|
628
|
+
"TextMesh",
|
|
629
|
+
"ParticleSystem",
|
|
630
|
+
"SpriteMask",
|
|
631
|
+
]
|
|
632
|
+
|
|
633
|
+
# All supported component types for --type option
|
|
634
|
+
ALL_COMPONENT_TYPES = BUILTIN_COMPONENT_TYPES + list(PACKAGE_COMPONENT_GUIDS.keys())
|
|
635
|
+
|
|
636
|
+
|
|
637
|
+
# ============================================================================
|
|
638
|
+
# Field Type Validation
|
|
639
|
+
# ============================================================================
|
|
640
|
+
|
|
641
|
+
|
|
642
|
+
class FieldType:
|
|
643
|
+
"""Unity field types for validation."""
|
|
644
|
+
|
|
645
|
+
VECTOR2 = "Vector2" # {x, y}
|
|
646
|
+
VECTOR3 = "Vector3" # {x, y, z}
|
|
647
|
+
VECTOR4 = "Vector4" # {x, y, z, w}
|
|
648
|
+
QUATERNION = "Quaternion" # {x, y, z, w}
|
|
649
|
+
COLOR = "Color" # {r, g, b, a}
|
|
650
|
+
BOOL = "bool" # 0 or 1
|
|
651
|
+
INT = "int" # integer
|
|
652
|
+
FLOAT = "float" # number
|
|
653
|
+
STRING = "string" # string
|
|
654
|
+
ASSET_REF = "AssetRef" # {fileID, guid, type}
|
|
655
|
+
|
|
656
|
+
|
|
657
|
+
# Field name to type mapping
|
|
658
|
+
FIELD_TYPES: dict[str, str] = {
|
|
659
|
+
# Transform / RectTransform - Vector3
|
|
660
|
+
"m_LocalPosition": FieldType.VECTOR3,
|
|
661
|
+
"m_LocalScale": FieldType.VECTOR3,
|
|
662
|
+
"m_LocalEulerAnglesHint": FieldType.VECTOR3,
|
|
663
|
+
"localPosition": FieldType.VECTOR3,
|
|
664
|
+
"localScale": FieldType.VECTOR3,
|
|
665
|
+
# Transform - Quaternion
|
|
666
|
+
"m_LocalRotation": FieldType.QUATERNION,
|
|
667
|
+
"localRotation": FieldType.QUATERNION,
|
|
668
|
+
# RectTransform - Vector2
|
|
669
|
+
"m_AnchorMin": FieldType.VECTOR2,
|
|
670
|
+
"m_AnchorMax": FieldType.VECTOR2,
|
|
671
|
+
"m_AnchoredPosition": FieldType.VECTOR2,
|
|
672
|
+
"m_SizeDelta": FieldType.VECTOR2,
|
|
673
|
+
"m_Pivot": FieldType.VECTOR2,
|
|
674
|
+
"anchorMin": FieldType.VECTOR2,
|
|
675
|
+
"anchorMax": FieldType.VECTOR2,
|
|
676
|
+
"anchoredPosition": FieldType.VECTOR2,
|
|
677
|
+
"sizeDelta": FieldType.VECTOR2,
|
|
678
|
+
"pivot": FieldType.VECTOR2,
|
|
679
|
+
# RectTransform - Vector4
|
|
680
|
+
"m_RaycastPadding": FieldType.VECTOR4,
|
|
681
|
+
"m_margin": FieldType.VECTOR4,
|
|
682
|
+
# Color fields
|
|
683
|
+
"m_Color": FieldType.COLOR,
|
|
684
|
+
"m_BackGroundColor": FieldType.COLOR,
|
|
685
|
+
"m_NormalColor": FieldType.COLOR,
|
|
686
|
+
"m_HighlightedColor": FieldType.COLOR,
|
|
687
|
+
"m_PressedColor": FieldType.COLOR,
|
|
688
|
+
"m_SelectedColor": FieldType.COLOR,
|
|
689
|
+
"m_DisabledColor": FieldType.COLOR,
|
|
690
|
+
# Common numeric fields
|
|
691
|
+
"m_Enabled": FieldType.BOOL,
|
|
692
|
+
"m_IsActive": FieldType.BOOL,
|
|
693
|
+
"m_RaycastTarget": FieldType.BOOL,
|
|
694
|
+
"m_Maskable": FieldType.BOOL,
|
|
695
|
+
"m_PreserveAspect": FieldType.BOOL,
|
|
696
|
+
"m_FillCenter": FieldType.BOOL,
|
|
697
|
+
"m_UseSpriteMesh": FieldType.BOOL,
|
|
698
|
+
# Asset reference fields
|
|
699
|
+
"m_Sprite": FieldType.ASSET_REF,
|
|
700
|
+
"m_Material": FieldType.ASSET_REF,
|
|
701
|
+
"m_Script": FieldType.ASSET_REF,
|
|
702
|
+
}
|
|
703
|
+
|
|
704
|
+
|
|
705
|
+
def _validate_field_value(field_name: str, value) -> tuple[bool, str | None]:
|
|
706
|
+
"""Validate a value against its expected field type.
|
|
707
|
+
|
|
708
|
+
Args:
|
|
709
|
+
field_name: The field name (e.g., "m_LocalPosition")
|
|
710
|
+
value: The value to validate
|
|
711
|
+
|
|
712
|
+
Returns:
|
|
713
|
+
Tuple of (is_valid, error_message). If valid, error_message is None.
|
|
714
|
+
"""
|
|
715
|
+
field_type = FIELD_TYPES.get(field_name)
|
|
716
|
+
|
|
717
|
+
if field_type is None:
|
|
718
|
+
# Unknown field, skip validation
|
|
719
|
+
return True, None
|
|
720
|
+
|
|
721
|
+
if field_type == FieldType.VECTOR2:
|
|
722
|
+
fmt = '{"x": 0, "y": 0}'
|
|
723
|
+
if not isinstance(value, dict):
|
|
724
|
+
return False, f"'{field_name}'은(는) Vector2 형식이어야 합니다: {fmt}"
|
|
725
|
+
required = {"x", "y"}
|
|
726
|
+
if not required.issubset(value.keys()):
|
|
727
|
+
missing = required - set(value.keys())
|
|
728
|
+
return False, f"'{field_name}'에 필수 키가 없습니다: {missing}"
|
|
729
|
+
for k in ["x", "y"]:
|
|
730
|
+
if not isinstance(value.get(k), (int, float)):
|
|
731
|
+
return False, f"'{field_name}.{k}'는 숫자여야 합니다"
|
|
732
|
+
return True, None
|
|
733
|
+
|
|
734
|
+
if field_type == FieldType.VECTOR3:
|
|
735
|
+
fmt = '{"x": 0, "y": 0, "z": 0}'
|
|
736
|
+
if not isinstance(value, dict):
|
|
737
|
+
return False, f"'{field_name}'은(는) Vector3 형식이어야 합니다: {fmt}"
|
|
738
|
+
required = {"x", "y", "z"}
|
|
739
|
+
if not required.issubset(value.keys()):
|
|
740
|
+
missing = required - set(value.keys())
|
|
741
|
+
return False, f"'{field_name}'에 필수 키가 없습니다: {missing}"
|
|
742
|
+
for k in ["x", "y", "z"]:
|
|
743
|
+
if not isinstance(value.get(k), (int, float)):
|
|
744
|
+
return False, f"'{field_name}.{k}'는 숫자여야 합니다"
|
|
745
|
+
return True, None
|
|
746
|
+
|
|
747
|
+
if field_type == FieldType.VECTOR4:
|
|
748
|
+
fmt = '{"x": 0, "y": 0, "z": 0, "w": 0}'
|
|
749
|
+
if not isinstance(value, dict):
|
|
750
|
+
return False, f"'{field_name}'은(는) Vector4 형식이어야 합니다: {fmt}"
|
|
751
|
+
required = {"x", "y", "z", "w"}
|
|
752
|
+
if not required.issubset(value.keys()):
|
|
753
|
+
missing = required - set(value.keys())
|
|
754
|
+
return False, f"'{field_name}'에 필수 키가 없습니다: {missing}"
|
|
755
|
+
for k in ["x", "y", "z", "w"]:
|
|
756
|
+
if not isinstance(value.get(k), (int, float)):
|
|
757
|
+
return False, f"'{field_name}.{k}'는 숫자여야 합니다"
|
|
758
|
+
return True, None
|
|
759
|
+
|
|
760
|
+
if field_type == FieldType.QUATERNION:
|
|
761
|
+
fmt = '{"x": 0, "y": 0, "z": 0, "w": 1}'
|
|
762
|
+
if not isinstance(value, dict):
|
|
763
|
+
return False, f"'{field_name}'은(는) Quaternion 형식이어야 합니다: {fmt}"
|
|
764
|
+
required = {"x", "y", "z", "w"}
|
|
765
|
+
if not required.issubset(value.keys()):
|
|
766
|
+
missing = required - set(value.keys())
|
|
767
|
+
return False, f"'{field_name}'에 필수 키가 없습니다: {missing}"
|
|
768
|
+
for k in ["x", "y", "z", "w"]:
|
|
769
|
+
if not isinstance(value.get(k), (int, float)):
|
|
770
|
+
return False, f"'{field_name}.{k}'는 숫자여야 합니다"
|
|
771
|
+
return True, None
|
|
772
|
+
|
|
773
|
+
if field_type == FieldType.COLOR:
|
|
774
|
+
fmt = '{"r": 1, "g": 1, "b": 1, "a": 1}'
|
|
775
|
+
if not isinstance(value, dict):
|
|
776
|
+
return False, f"'{field_name}'은(는) Color 형식이어야 합니다: {fmt}"
|
|
777
|
+
required = {"r", "g", "b", "a"}
|
|
778
|
+
if not required.issubset(value.keys()):
|
|
779
|
+
missing = required - set(value.keys())
|
|
780
|
+
return False, f"'{field_name}'에 필수 키가 없습니다: {missing}"
|
|
781
|
+
for k in ["r", "g", "b", "a"]:
|
|
782
|
+
if not isinstance(value.get(k), (int, float)):
|
|
783
|
+
return False, f"'{field_name}.{k}'는 숫자여야 합니다"
|
|
784
|
+
return True, None
|
|
785
|
+
|
|
786
|
+
if field_type == FieldType.BOOL:
|
|
787
|
+
if value not in (0, 1, True, False):
|
|
788
|
+
return False, f"'{field_name}'은(는) bool 형식이어야 합니다: 0 또는 1"
|
|
789
|
+
return True, None
|
|
790
|
+
|
|
791
|
+
if field_type == FieldType.INT:
|
|
792
|
+
if not isinstance(value, int) or isinstance(value, bool):
|
|
793
|
+
return False, f"'{field_name}'은(는) 정수여야 합니다"
|
|
794
|
+
return True, None
|
|
795
|
+
|
|
796
|
+
if field_type == FieldType.FLOAT:
|
|
797
|
+
if not isinstance(value, (int, float)) or isinstance(value, bool):
|
|
798
|
+
return False, f"'{field_name}'은(는) 숫자여야 합니다"
|
|
799
|
+
return True, None
|
|
800
|
+
|
|
801
|
+
if field_type == FieldType.STRING:
|
|
802
|
+
if not isinstance(value, str):
|
|
803
|
+
return False, f"'{field_name}'은(는) 문자열이어야 합니다"
|
|
804
|
+
return True, None
|
|
805
|
+
|
|
806
|
+
if field_type == FieldType.ASSET_REF:
|
|
807
|
+
# Asset references are validated separately by asset_resolver
|
|
808
|
+
# Skip validation here if it's already a resolved reference
|
|
809
|
+
if isinstance(value, dict) and "fileID" in value:
|
|
810
|
+
return True, None
|
|
811
|
+
# If it's a string starting with @, it will be resolved later
|
|
812
|
+
if isinstance(value, str) and value.startswith("@"):
|
|
813
|
+
return True, None
|
|
814
|
+
return False, f"'{field_name}'은(는) 에셋 참조여야 합니다: @Assets/path.ext"
|
|
815
|
+
|
|
816
|
+
return True, None
|
|
817
|
+
|
|
818
|
+
|
|
819
|
+
# ============================================================================
|
|
820
|
+
# Path Resolution Helpers
|
|
821
|
+
# ============================================================================
|
|
822
|
+
|
|
823
|
+
|
|
824
|
+
def _resolve_gameobject_by_path(
|
|
825
|
+
doc: UnityYAMLDocument,
|
|
826
|
+
path_spec: str,
|
|
827
|
+
) -> tuple[int | None, str | None]:
|
|
828
|
+
"""Resolve a GameObject by path specification.
|
|
829
|
+
|
|
830
|
+
Args:
|
|
831
|
+
doc: The Unity YAML document
|
|
832
|
+
path_spec: Path like "Canvas/Panel/Button" or "Canvas/Panel/Button[1]"
|
|
833
|
+
|
|
834
|
+
Returns:
|
|
835
|
+
Tuple of (fileID, error_message). If successful, error_message is None.
|
|
836
|
+
If failed, fileID is None and error_message contains the error.
|
|
837
|
+
"""
|
|
838
|
+
import re
|
|
839
|
+
|
|
840
|
+
# Parse path and optional index
|
|
841
|
+
index_match = re.match(r"^(.+)\[(\d+)\]$", path_spec)
|
|
842
|
+
if index_match:
|
|
843
|
+
path = index_match.group(1)
|
|
844
|
+
index = int(index_match.group(2))
|
|
845
|
+
else:
|
|
846
|
+
path = path_spec
|
|
847
|
+
index = None
|
|
848
|
+
|
|
849
|
+
# Build transform hierarchy
|
|
850
|
+
transforms: dict[int, dict] = {} # transform_id -> {gameObject, parent}
|
|
851
|
+
go_names: dict[int, str] = {} # go_id -> name
|
|
852
|
+
go_transforms: dict[int, int] = {} # go_id -> transform_id
|
|
853
|
+
|
|
854
|
+
for obj in doc.objects:
|
|
855
|
+
if obj.class_id == 4 or obj.class_id == 224: # Transform or RectTransform
|
|
856
|
+
content = obj.get_content()
|
|
857
|
+
if content:
|
|
858
|
+
go_ref = content.get("m_GameObject", {})
|
|
859
|
+
go_id = go_ref.get("fileID", 0) if isinstance(go_ref, dict) else 0
|
|
860
|
+
father = content.get("m_Father", {})
|
|
861
|
+
father_id = father.get("fileID", 0) if isinstance(father, dict) else 0
|
|
862
|
+
transforms[obj.file_id] = {
|
|
863
|
+
"gameObject": go_id,
|
|
864
|
+
"parent": father_id,
|
|
865
|
+
}
|
|
866
|
+
if go_id:
|
|
867
|
+
go_transforms[go_id] = obj.file_id
|
|
868
|
+
|
|
869
|
+
for obj in doc.objects:
|
|
870
|
+
if obj.class_id == 1: # GameObject
|
|
871
|
+
content = obj.get_content()
|
|
872
|
+
if content:
|
|
873
|
+
go_names[obj.file_id] = content.get("m_Name", "")
|
|
874
|
+
|
|
875
|
+
# Build path for each GameObject
|
|
876
|
+
def build_path(transform_id: int, visited: set[int]) -> str:
|
|
877
|
+
if transform_id in visited or transform_id not in transforms:
|
|
878
|
+
return ""
|
|
879
|
+
visited.add(transform_id)
|
|
880
|
+
|
|
881
|
+
t = transforms[transform_id]
|
|
882
|
+
name = go_names.get(t["gameObject"], "")
|
|
883
|
+
|
|
884
|
+
if t["parent"] == 0:
|
|
885
|
+
return name
|
|
886
|
+
else:
|
|
887
|
+
parent_path = build_path(t["parent"], visited)
|
|
888
|
+
if parent_path:
|
|
889
|
+
return f"{parent_path}/{name}"
|
|
890
|
+
return name
|
|
891
|
+
|
|
892
|
+
# Find all GameObjects matching the path
|
|
893
|
+
matches: list[tuple[int, str]] = [] # (go_id, full_path)
|
|
894
|
+
for go_id, transform_id in go_transforms.items():
|
|
895
|
+
full_path = build_path(transform_id, set())
|
|
896
|
+
if full_path == path:
|
|
897
|
+
matches.append((go_id, full_path))
|
|
898
|
+
|
|
899
|
+
if not matches:
|
|
900
|
+
return None, f"GameObject not found at path '{path}'"
|
|
901
|
+
|
|
902
|
+
if len(matches) == 1:
|
|
903
|
+
return matches[0][0], None
|
|
904
|
+
|
|
905
|
+
# Multiple matches
|
|
906
|
+
if index is not None:
|
|
907
|
+
if index < len(matches):
|
|
908
|
+
return matches[index][0], None
|
|
909
|
+
else:
|
|
910
|
+
count = len(matches)
|
|
911
|
+
return None, f"Index [{index}] out of range. Found {count} GameObjects at '{path}'"
|
|
912
|
+
|
|
913
|
+
# No index specified, show options
|
|
914
|
+
error_lines = [f"Multiple GameObjects at path '{path}'."]
|
|
915
|
+
error_lines.append(f'Use index to select: --to "{path}[0]" (0 to {len(matches) - 1})')
|
|
916
|
+
return None, "\n".join(error_lines)
|
|
917
|
+
|
|
918
|
+
|
|
919
|
+
def _resolve_component_path(
|
|
920
|
+
doc: UnityYAMLDocument,
|
|
921
|
+
path_spec: str,
|
|
922
|
+
) -> tuple[str | None, str | None]:
|
|
923
|
+
"""Resolve a component path to the internal format.
|
|
924
|
+
|
|
925
|
+
Converts paths like:
|
|
926
|
+
"Player/SpriteRenderer/m_Color" -> "components/12345/m_Color"
|
|
927
|
+
"Canvas/Panel/Button/Image/m_Sprite" -> "components/67890/m_Sprite"
|
|
928
|
+
"Canvas/Button/Image[1]/m_Color" -> "components/11111/m_Color"
|
|
929
|
+
"Player/name" -> "gameObjects/12345/name"
|
|
930
|
+
"Canvas/Panel/RectTransform" -> "components/12345" (for batch mode)
|
|
931
|
+
|
|
932
|
+
Args:
|
|
933
|
+
doc: The Unity YAML document
|
|
934
|
+
path_spec: Path like "Player/SpriteRenderer/m_Color"
|
|
935
|
+
|
|
936
|
+
Returns:
|
|
937
|
+
Tuple of (resolved_path, error_message). If successful, error_message is None.
|
|
938
|
+
"""
|
|
939
|
+
import re
|
|
940
|
+
|
|
941
|
+
from unityflow.parser import CLASS_IDS
|
|
942
|
+
|
|
943
|
+
# Check if already in internal format (components/12345/... or gameObjects/12345/...)
|
|
944
|
+
if re.match(r"^(components|gameObjects)/\d+", path_spec):
|
|
945
|
+
return path_spec, None
|
|
946
|
+
|
|
947
|
+
parts = path_spec.split("/")
|
|
948
|
+
if len(parts) < 2:
|
|
949
|
+
return None, f"Invalid path format: {path_spec}"
|
|
950
|
+
|
|
951
|
+
# Build reverse mapping: class name -> class IDs
|
|
952
|
+
name_to_ids: dict[str, list[int]] = {}
|
|
953
|
+
for class_id, class_name in CLASS_IDS.items():
|
|
954
|
+
name_lower = class_name.lower()
|
|
955
|
+
if name_lower not in name_to_ids:
|
|
956
|
+
name_to_ids[name_lower] = []
|
|
957
|
+
name_to_ids[name_lower].append(class_id)
|
|
958
|
+
|
|
959
|
+
# Also add package component names (they're MonoBehaviour)
|
|
960
|
+
package_components = {
|
|
961
|
+
"image",
|
|
962
|
+
"button",
|
|
963
|
+
"scrollrect",
|
|
964
|
+
"mask",
|
|
965
|
+
"rectmask2d",
|
|
966
|
+
"graphicraycaster",
|
|
967
|
+
"canvasscaler",
|
|
968
|
+
"verticallayoutgroup",
|
|
969
|
+
"horizontallayoutgroup",
|
|
970
|
+
"contentsizefitter",
|
|
971
|
+
"textmeshprougui",
|
|
972
|
+
"tmp_inputfield",
|
|
973
|
+
"eventsystem",
|
|
974
|
+
"inputsystemuiinputmodule",
|
|
975
|
+
"light2d",
|
|
976
|
+
}
|
|
977
|
+
|
|
978
|
+
# Check if the LAST part is a component type (for batch mode - path ends with component)
|
|
979
|
+
# e.g., "Canvas/Panel/RectTransform" -> path to the component itself, no property
|
|
980
|
+
last_part_match = re.match(r"^([A-Za-z][A-Za-z0-9]*)(?:\[(\d+)\])?$", parts[-1])
|
|
981
|
+
if last_part_match:
|
|
982
|
+
last_component_type = last_part_match.group(1)
|
|
983
|
+
last_component_index = int(last_part_match.group(2)) if last_part_match.group(2) else None
|
|
984
|
+
last_component_type_lower = last_component_type.lower()
|
|
985
|
+
|
|
986
|
+
# Check if last part is a known component type
|
|
987
|
+
last_is_component = (
|
|
988
|
+
last_component_type_lower in name_to_ids
|
|
989
|
+
or last_component_type_lower in package_components
|
|
990
|
+
or last_component_type == "MonoBehaviour"
|
|
991
|
+
)
|
|
992
|
+
|
|
993
|
+
if last_is_component:
|
|
994
|
+
# Path format: GameObject.../ComponentType (no property - for batch mode)
|
|
995
|
+
go_path = "/".join(parts[:-1])
|
|
996
|
+
if not go_path:
|
|
997
|
+
return None, f"Invalid path: missing GameObject path before {last_component_type}"
|
|
998
|
+
|
|
999
|
+
# Resolve GameObject
|
|
1000
|
+
go_id, error = _resolve_gameobject_by_path(doc, go_path)
|
|
1001
|
+
if error:
|
|
1002
|
+
return None, error
|
|
1003
|
+
|
|
1004
|
+
# Find the component
|
|
1005
|
+
go = doc.get_by_file_id(go_id)
|
|
1006
|
+
if not go:
|
|
1007
|
+
return None, "GameObject not found"
|
|
1008
|
+
|
|
1009
|
+
go_content = go.get_content()
|
|
1010
|
+
if not go_content or "m_Component" not in go_content:
|
|
1011
|
+
return None, "GameObject has no components"
|
|
1012
|
+
|
|
1013
|
+
# Find matching components
|
|
1014
|
+
matching_components: list[int] = []
|
|
1015
|
+
for comp_ref in go_content["m_Component"]:
|
|
1016
|
+
comp_id = comp_ref.get("component", {}).get("fileID", 0)
|
|
1017
|
+
comp = doc.get_by_file_id(comp_id)
|
|
1018
|
+
if not comp:
|
|
1019
|
+
continue
|
|
1020
|
+
|
|
1021
|
+
# Check if component matches the type
|
|
1022
|
+
comp_class_name = comp.class_name.lower()
|
|
1023
|
+
|
|
1024
|
+
# For package components (MonoBehaviour), check script GUID
|
|
1025
|
+
if last_component_type_lower in package_components:
|
|
1026
|
+
if comp.class_id == 114: # MonoBehaviour
|
|
1027
|
+
comp_content = comp.get_content()
|
|
1028
|
+
if comp_content:
|
|
1029
|
+
script_ref = comp_content.get("m_Script", {})
|
|
1030
|
+
if isinstance(script_ref, dict):
|
|
1031
|
+
script_guid = script_ref.get("guid", "")
|
|
1032
|
+
else:
|
|
1033
|
+
script_guid = ""
|
|
1034
|
+
# Check if GUID matches the package component
|
|
1035
|
+
# Use case-insensitive key lookup
|
|
1036
|
+
expected_guid = ""
|
|
1037
|
+
for key, guid in PACKAGE_COMPONENT_GUIDS.items():
|
|
1038
|
+
if key.lower() == last_component_type_lower:
|
|
1039
|
+
expected_guid = guid.lower()
|
|
1040
|
+
break
|
|
1041
|
+
if script_guid.lower() == expected_guid:
|
|
1042
|
+
matching_components.append(comp_id)
|
|
1043
|
+
elif comp_class_name == last_component_type_lower:
|
|
1044
|
+
matching_components.append(comp_id)
|
|
1045
|
+
|
|
1046
|
+
if not matching_components:
|
|
1047
|
+
return None, f"Component '{last_component_type}' not found on '{go_path}'"
|
|
1048
|
+
|
|
1049
|
+
if len(matching_components) == 1:
|
|
1050
|
+
# Return component path without property (for batch mode)
|
|
1051
|
+
return f"components/{matching_components[0]}", None
|
|
1052
|
+
|
|
1053
|
+
# Multiple matches
|
|
1054
|
+
if last_component_index is not None:
|
|
1055
|
+
if last_component_index < len(matching_components):
|
|
1056
|
+
comp_id = matching_components[last_component_index]
|
|
1057
|
+
return f"components/{comp_id}", None
|
|
1058
|
+
else:
|
|
1059
|
+
count = len(matching_components)
|
|
1060
|
+
idx = last_component_index
|
|
1061
|
+
return None, f"Index [{idx}] out of range. Found {count} components"
|
|
1062
|
+
|
|
1063
|
+
# No index specified
|
|
1064
|
+
comp_type = last_component_type
|
|
1065
|
+
error_lines = [f"Multiple '{comp_type}' components on '{go_path}'."]
|
|
1066
|
+
max_idx = len(matching_components) - 1
|
|
1067
|
+
error_lines.append(f'Use index: "{go_path}/{comp_type}[0]" (0-{max_idx})')
|
|
1068
|
+
return None, "\n".join(error_lines)
|
|
1069
|
+
|
|
1070
|
+
# Last part is the property name
|
|
1071
|
+
property_name = parts[-1]
|
|
1072
|
+
|
|
1073
|
+
# Check if second-to-last part is a component type (with optional index)
|
|
1074
|
+
component_match = re.match(r"^([A-Za-z][A-Za-z0-9]*)(?:\[(\d+)\])?$", parts[-2])
|
|
1075
|
+
|
|
1076
|
+
if component_match:
|
|
1077
|
+
component_type = component_match.group(1)
|
|
1078
|
+
component_index = int(component_match.group(2)) if component_match.group(2) else None
|
|
1079
|
+
component_type_lower = component_type.lower()
|
|
1080
|
+
|
|
1081
|
+
# Check if it's a known component type
|
|
1082
|
+
is_component = (
|
|
1083
|
+
component_type_lower in name_to_ids
|
|
1084
|
+
or component_type_lower in package_components
|
|
1085
|
+
or component_type == "MonoBehaviour"
|
|
1086
|
+
)
|
|
1087
|
+
|
|
1088
|
+
if is_component:
|
|
1089
|
+
# Path format: GameObject.../ComponentType/property
|
|
1090
|
+
go_path = "/".join(parts[:-2])
|
|
1091
|
+
if not go_path:
|
|
1092
|
+
return None, f"Invalid path: missing GameObject path before {component_type}"
|
|
1093
|
+
|
|
1094
|
+
# Resolve GameObject
|
|
1095
|
+
go_id, error = _resolve_gameobject_by_path(doc, go_path)
|
|
1096
|
+
if error:
|
|
1097
|
+
return None, error
|
|
1098
|
+
|
|
1099
|
+
# Find the component
|
|
1100
|
+
go = doc.get_by_file_id(go_id)
|
|
1101
|
+
if not go:
|
|
1102
|
+
return None, "GameObject not found"
|
|
1103
|
+
|
|
1104
|
+
go_content = go.get_content()
|
|
1105
|
+
if not go_content or "m_Component" not in go_content:
|
|
1106
|
+
return None, "GameObject has no components"
|
|
1107
|
+
|
|
1108
|
+
# Find matching components
|
|
1109
|
+
matching_components: list[int] = []
|
|
1110
|
+
for comp_ref in go_content["m_Component"]:
|
|
1111
|
+
comp_id = comp_ref.get("component", {}).get("fileID", 0)
|
|
1112
|
+
comp = doc.get_by_file_id(comp_id)
|
|
1113
|
+
if not comp:
|
|
1114
|
+
continue
|
|
1115
|
+
|
|
1116
|
+
# Check if component matches the type
|
|
1117
|
+
comp_class_name = comp.class_name.lower()
|
|
1118
|
+
|
|
1119
|
+
# For package components (MonoBehaviour), check script GUID
|
|
1120
|
+
if component_type_lower in package_components:
|
|
1121
|
+
if comp.class_id == 114: # MonoBehaviour
|
|
1122
|
+
comp_content = comp.get_content()
|
|
1123
|
+
if comp_content:
|
|
1124
|
+
script_ref = comp_content.get("m_Script", {})
|
|
1125
|
+
if isinstance(script_ref, dict):
|
|
1126
|
+
script_guid = script_ref.get("guid", "")
|
|
1127
|
+
else:
|
|
1128
|
+
script_guid = ""
|
|
1129
|
+
# Check if GUID matches the package component
|
|
1130
|
+
# Use case-insensitive key lookup
|
|
1131
|
+
expected_guid = ""
|
|
1132
|
+
for key, guid in PACKAGE_COMPONENT_GUIDS.items():
|
|
1133
|
+
if key.lower() == component_type_lower:
|
|
1134
|
+
expected_guid = guid.lower()
|
|
1135
|
+
break
|
|
1136
|
+
if script_guid.lower() == expected_guid:
|
|
1137
|
+
matching_components.append(comp_id)
|
|
1138
|
+
elif comp_class_name == component_type_lower:
|
|
1139
|
+
matching_components.append(comp_id)
|
|
1140
|
+
|
|
1141
|
+
if not matching_components:
|
|
1142
|
+
return None, f"Component '{component_type}' not found on '{go_path}'"
|
|
1143
|
+
|
|
1144
|
+
if len(matching_components) == 1:
|
|
1145
|
+
return f"components/{matching_components[0]}/{property_name}", None
|
|
1146
|
+
|
|
1147
|
+
# Multiple matches
|
|
1148
|
+
if component_index is not None:
|
|
1149
|
+
if component_index < len(matching_components):
|
|
1150
|
+
comp_id = matching_components[component_index]
|
|
1151
|
+
return f"components/{comp_id}/{property_name}", None
|
|
1152
|
+
else:
|
|
1153
|
+
count = len(matching_components)
|
|
1154
|
+
return None, f"Index [{component_index}] out of range. Found {count}"
|
|
1155
|
+
|
|
1156
|
+
# No index specified
|
|
1157
|
+
comp_type = component_type
|
|
1158
|
+
error_lines = [f"Multiple '{comp_type}' components on '{go_path}'."]
|
|
1159
|
+
max_idx = len(matching_components) - 1
|
|
1160
|
+
error_lines.append(f'Use index: "{go_path}/{comp_type}[0]/..." (0-{max_idx})')
|
|
1161
|
+
return None, "\n".join(error_lines)
|
|
1162
|
+
|
|
1163
|
+
# Not a component path - treat as GameObject property
|
|
1164
|
+
# Path format: GameObject.../property
|
|
1165
|
+
go_path = "/".join(parts[:-1])
|
|
1166
|
+
go_id, error = _resolve_gameobject_by_path(doc, go_path)
|
|
1167
|
+
if error:
|
|
1168
|
+
return None, error
|
|
1169
|
+
|
|
1170
|
+
return f"gameObjects/{go_id}/{property_name}", None
|
|
1171
|
+
|
|
1172
|
+
|
|
1173
|
+
@main.command(name="get")
|
|
1174
|
+
@click.argument("file", type=click.Path(exists=True, path_type=Path))
|
|
1175
|
+
@click.argument("path_spec", type=str)
|
|
1176
|
+
@click.option(
|
|
1177
|
+
"--format",
|
|
1178
|
+
"output_format",
|
|
1179
|
+
type=click.Choice(["json", "text"]),
|
|
1180
|
+
default="json",
|
|
1181
|
+
help="Output format (default: json)",
|
|
1182
|
+
)
|
|
1183
|
+
def get_value_cmd(
|
|
1184
|
+
file: Path,
|
|
1185
|
+
path_spec: str,
|
|
1186
|
+
output_format: str,
|
|
1187
|
+
) -> None:
|
|
1188
|
+
"""Get a value at a specific path in a Unity YAML file.
|
|
1189
|
+
|
|
1190
|
+
Path Format:
|
|
1191
|
+
GameObject/ComponentType/property - Component property
|
|
1192
|
+
GameObject/property - GameObject property
|
|
1193
|
+
|
|
1194
|
+
Examples:
|
|
1195
|
+
|
|
1196
|
+
# Get Transform position
|
|
1197
|
+
unityflow get Player.prefab "Player/Transform/localPosition"
|
|
1198
|
+
|
|
1199
|
+
# Get SpriteRenderer color
|
|
1200
|
+
unityflow get Player.prefab "Player/SpriteRenderer/m_Color"
|
|
1201
|
+
|
|
1202
|
+
# Get GameObject name
|
|
1203
|
+
unityflow get Player.prefab "Player/name"
|
|
1204
|
+
|
|
1205
|
+
# Get all properties of a component
|
|
1206
|
+
unityflow get Player.prefab "Player/Transform"
|
|
1207
|
+
|
|
1208
|
+
# When multiple components of same type exist, use index
|
|
1209
|
+
unityflow get Scene.unity "Canvas/Panel/Image[1]/m_Color"
|
|
1210
|
+
|
|
1211
|
+
# Output as text (for simple values)
|
|
1212
|
+
unityflow get Player.prefab "Player/Transform/localPosition" --format text
|
|
1213
|
+
"""
|
|
1214
|
+
import json
|
|
1215
|
+
|
|
1216
|
+
from unityflow.parser import UnityYAMLDocument
|
|
1217
|
+
from unityflow.query import get_value
|
|
1218
|
+
|
|
1219
|
+
try:
|
|
1220
|
+
doc = UnityYAMLDocument.load(file)
|
|
1221
|
+
except Exception as e:
|
|
1222
|
+
click.echo(f"Error: Failed to load {file}: {e}", err=True)
|
|
1223
|
+
sys.exit(1)
|
|
1224
|
+
|
|
1225
|
+
# Resolve path (convert "Player/Transform/localPosition" to "components/12345/localPosition")
|
|
1226
|
+
resolved_path, error = _resolve_component_path(doc, path_spec)
|
|
1227
|
+
if error:
|
|
1228
|
+
click.echo(f"Error: {error}", err=True)
|
|
1229
|
+
sys.exit(1)
|
|
1230
|
+
|
|
1231
|
+
# Get the value
|
|
1232
|
+
value = get_value(doc, resolved_path)
|
|
1233
|
+
if value is None:
|
|
1234
|
+
click.echo(f"Error: No value found at path '{path_spec}'", err=True)
|
|
1235
|
+
sys.exit(1)
|
|
1236
|
+
|
|
1237
|
+
# Output
|
|
1238
|
+
if output_format == "json":
|
|
1239
|
+
click.echo(json.dumps(value, indent=2, default=str))
|
|
1240
|
+
else:
|
|
1241
|
+
# Text format - simple representation
|
|
1242
|
+
if isinstance(value, dict):
|
|
1243
|
+
for k, v in value.items():
|
|
1244
|
+
click.echo(f"{k}: {v}")
|
|
1245
|
+
elif isinstance(value, list):
|
|
1246
|
+
for item in value:
|
|
1247
|
+
click.echo(str(item))
|
|
1248
|
+
else:
|
|
1249
|
+
click.echo(str(value))
|
|
1250
|
+
|
|
1251
|
+
|
|
1252
|
+
@main.command(name="set")
|
|
1253
|
+
@click.argument("file", type=click.Path(exists=True, path_type=Path))
|
|
1254
|
+
@click.option(
|
|
1255
|
+
"--path",
|
|
1256
|
+
"-p",
|
|
1257
|
+
"set_path",
|
|
1258
|
+
required=True,
|
|
1259
|
+
help="Path to the value (e.g., 'Player/Transform/localPosition')",
|
|
1260
|
+
)
|
|
1261
|
+
@click.option(
|
|
1262
|
+
"--value",
|
|
1263
|
+
"-v",
|
|
1264
|
+
default=None,
|
|
1265
|
+
help="Value to set (JSON format for complex values)",
|
|
1266
|
+
)
|
|
1267
|
+
@click.option(
|
|
1268
|
+
"--batch",
|
|
1269
|
+
"-b",
|
|
1270
|
+
"batch_values_json",
|
|
1271
|
+
default=None,
|
|
1272
|
+
help="JSON object with multiple key-value pairs to set at once",
|
|
1273
|
+
)
|
|
1274
|
+
@click.option(
|
|
1275
|
+
"-o",
|
|
1276
|
+
"--output",
|
|
1277
|
+
type=click.Path(path_type=Path),
|
|
1278
|
+
help="Output file (default: modify in place)",
|
|
1279
|
+
)
|
|
1280
|
+
def set_value_cmd(
|
|
1281
|
+
file: Path,
|
|
1282
|
+
set_path: str,
|
|
1283
|
+
value: str | None,
|
|
1284
|
+
batch_values_json: str | None,
|
|
1285
|
+
output: Path | None,
|
|
1286
|
+
) -> None:
|
|
1287
|
+
"""Set a value at a specific path in a Unity YAML file.
|
|
1288
|
+
|
|
1289
|
+
Path Format:
|
|
1290
|
+
GameObject/ComponentType/property - Component property
|
|
1291
|
+
GameObject/property - GameObject property
|
|
1292
|
+
|
|
1293
|
+
Examples:
|
|
1294
|
+
|
|
1295
|
+
# Set Transform position
|
|
1296
|
+
unityflow set Player.prefab \\
|
|
1297
|
+
--path "Player/Transform/localPosition" \\
|
|
1298
|
+
--value '{"x": 0, "y": 5, "z": 0}'
|
|
1299
|
+
|
|
1300
|
+
# Set SpriteRenderer color
|
|
1301
|
+
unityflow set Player.prefab \\
|
|
1302
|
+
--path "Player/SpriteRenderer/m_Color" \\
|
|
1303
|
+
--value '{"r": 1, "g": 0, "b": 0, "a": 1}'
|
|
1304
|
+
|
|
1305
|
+
# Set Image sprite (with asset reference)
|
|
1306
|
+
unityflow set Scene.unity \\
|
|
1307
|
+
--path "Canvas/Panel/Button/Image/m_Sprite" \\
|
|
1308
|
+
--value "@Assets/Sprites/icon.png"
|
|
1309
|
+
|
|
1310
|
+
# Set GameObject name
|
|
1311
|
+
unityflow set Player.prefab \\
|
|
1312
|
+
--path "Player/name" \\
|
|
1313
|
+
--value '"NewName"'
|
|
1314
|
+
|
|
1315
|
+
# When multiple components of same type exist, use index
|
|
1316
|
+
unityflow set Scene.unity \\
|
|
1317
|
+
--path "Canvas/Panel/Image[1]/m_Color" \\
|
|
1318
|
+
--value '{"r": 0, "g": 1, "b": 0, "a": 1}'
|
|
1319
|
+
|
|
1320
|
+
# Set multiple fields at once (batch mode)
|
|
1321
|
+
unityflow set Scene.unity \\
|
|
1322
|
+
--path "Player/MonoBehaviour" \\
|
|
1323
|
+
--batch '{"speed": 5.0, "health": 100}'
|
|
1324
|
+
|
|
1325
|
+
Asset References:
|
|
1326
|
+
Use @ prefix to reference assets by path:
|
|
1327
|
+
"@Assets/Sprites/icon.png" -> Sprite reference
|
|
1328
|
+
"@Assets/Sprites/atlas.png:idle_0" -> Sub-sprite
|
|
1329
|
+
"@Assets/Prefabs/Enemy.prefab" -> Prefab reference
|
|
1330
|
+
"""
|
|
1331
|
+
import json
|
|
1332
|
+
|
|
1333
|
+
from unityflow.asset_resolver import (
|
|
1334
|
+
AssetTypeMismatchError,
|
|
1335
|
+
is_asset_reference,
|
|
1336
|
+
resolve_value,
|
|
1337
|
+
)
|
|
1338
|
+
from unityflow.parser import UnityYAMLDocument
|
|
1339
|
+
from unityflow.query import merge_values, set_value
|
|
1340
|
+
|
|
1341
|
+
# Count how many value modes are specified
|
|
1342
|
+
value_modes = sum(
|
|
1343
|
+
[
|
|
1344
|
+
value is not None,
|
|
1345
|
+
batch_values_json is not None,
|
|
1346
|
+
]
|
|
1347
|
+
    )

    # Validate options
    if value_modes == 0:
        click.echo("Error: One of --value or --batch is required", err=True)
        sys.exit(1)
    if value_modes > 1:
        click.echo("Error: Cannot use multiple value modes (--value, --batch)", err=True)
        sys.exit(1)

    try:
        doc = UnityYAMLDocument.load(file)
    except Exception as e:
        click.echo(f"Error: Failed to load {file}: {e}", err=True)
        sys.exit(1)

    output_path = output or file
    project_root = find_unity_project_root(file)

    # Resolve path (convert "Player/Transform/localPosition" to "components/12345/localPosition")
    original_path = set_path
    resolved_path, error = _resolve_component_path(doc, set_path)
    if error:
        click.echo(f"Error: {error}", err=True)
        sys.exit(1)
    set_path = resolved_path

    # Extract field name from path for type validation
    # e.g., "components/12345/m_Sprite" -> "m_Sprite"
    field_name = set_path.rsplit("/", 1)[-1] if "/" in set_path else set_path

    if batch_values_json is not None:
        # Batch mode - field names are the dict keys
        try:
            parsed_values = json.loads(batch_values_json)
        except json.JSONDecodeError as e:
            click.echo(f"Error: Invalid JSON for --batch: {e}", err=True)
            sys.exit(1)

        if not isinstance(parsed_values, dict):
            click.echo("Error: --batch value must be a JSON object", err=True)
            sys.exit(1)

        # Validate field types in batch values
        for batch_field_name, batch_value in parsed_values.items():
            is_valid, error_msg = _validate_field_value(batch_field_name, batch_value)
            if not is_valid:
                click.echo(f"Error: {error_msg}", err=True)
                sys.exit(1)

        # Resolve asset references in batch values (keys are used as field names)
        try:
            resolved_values = resolve_value(parsed_values, project_root)
        except AssetTypeMismatchError as e:
            click.echo(f"Error: {e}", err=True)
            sys.exit(1)
        except ValueError as e:
            click.echo(f"Error: {e}", err=True)
            sys.exit(1)

        updated, created = merge_values(doc, set_path, resolved_values, create=True)

        if updated == 0 and created == 0:
            click.echo(f"Error: Path not found or no fields set: {original_path}", err=True)
            sys.exit(1)

        doc.save(output_path)
        click.echo(f"Set {updated + created} fields at {original_path}")
        click.echo(f" Updated: {updated}, Created: {created}")

    else:
        # Single value mode
        try:
            parsed_value = json.loads(value)
        except json.JSONDecodeError:
            parsed_value = value

        # Validate field type
        is_valid, error_msg = _validate_field_value(field_name, parsed_value)
        if not is_valid:
            click.echo(f"Error: {error_msg}", err=True)
            sys.exit(1)

        # Resolve asset references with field name for type validation
        try:
            resolved_value = resolve_value(parsed_value, project_root, field_name=field_name)
        except AssetTypeMismatchError as e:
            click.echo(f"Error: {e}", err=True)
            sys.exit(1)
        except ValueError as e:
            click.echo(f"Error: {e}", err=True)
            sys.exit(1)

        # Show resolved asset info if it was an asset reference
        is_asset_ref = is_asset_reference(value) if isinstance(value, str) else False

        if set_value(doc, set_path, resolved_value, create=True):
            doc.save(output_path)
            if is_asset_ref:
                click.echo(f"Set {original_path} = {value[1:]}")  # Remove @ prefix for display
            else:
                click.echo(f"Set {original_path} = {value}")
        else:
            click.echo(f"Error: Path not found: {original_path}", err=True)
            sys.exit(1)

    if output:
        click.echo(f"Saved to: {output}")

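# Illustrative sketch (not part of the original cli.py): the set-value flow above
# first rewrites a human-readable path such as "Player/Transform/localPosition"
# into a fileID-based path and then hands it to set_value()/merge_values(). The
# helper below is a hypothetical, standalone illustration of slash-path
# assignment on plain nested dicts; the real unityflow implementation may differ.
def _sketch_set_by_path(data: dict, path: str, value: object) -> None:
    """Walk a slash-separated path, creating intermediate dicts as needed."""
    parts = path.split("/")
    node = data
    for key in parts[:-1]:
        node = node.setdefault(key, {})  # create missing intermediate levels
    node[parts[-1]] = value


# Hypothetical usage:
#   data = {}
#   _sketch_set_by_path(data, "components/12345/m_Sprite", {"fileID": 0})
#   -> {"components": {"12345": {"m_Sprite": {"fileID": 0}}}}
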
@main.command(name="git-textconv")
@click.argument("file", type=click.Path(exists=True, path_type=Path))
def git_textconv(file: Path) -> None:
    """Output normalized content for git diff textconv.

    This command is designed to be used as a git textconv filter.
    It outputs the normalized YAML to stdout for git to compare.

    Setup in .gitconfig:

        [diff "unity"]
            textconv = unityflow git-textconv

    Setup in .gitattributes:

        *.prefab diff=unity
        *.unity diff=unity
        *.asset diff=unity
    """
    normalizer = UnityPrefabNormalizer()

    try:
        content = normalizer.normalize_file(file)
        # Output to stdout without trailing message
        sys.stdout.write(content)
    except Exception as e:
        # On error, output original file content so git can still diff
        click.echo(f"# Error normalizing: {e}", err=True)
        sys.stdout.write(file.read_text(encoding="utf-8"))

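# Illustrative sketch (not part of the original cli.py): git-textconv is wired
# up through .gitconfig/.gitattributes as the docstring above shows. The helper
# below is a hypothetical way to confirm from Python that the filter is
# registered; the repo path and the substring check are assumptions.
def _sketch_textconv_configured(repo_root: str = ".") -> bool:
    """Return True if diff.unity.textconv is set for the given repository."""
    import subprocess

    result = subprocess.run(
        ["git", "-C", repo_root, "config", "--get", "diff.unity.textconv"],
        capture_output=True,
        text=True,
    )
    return result.returncode == 0 and "git-textconv" in result.stdout
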
@main.command(name="merge")
@click.argument("base", type=click.Path(exists=True, path_type=Path))
@click.argument("ours", type=click.Path(exists=True, path_type=Path))
@click.argument("theirs", type=click.Path(exists=True, path_type=Path))
@click.option(
    "-o",
    "--output",
    type=click.Path(path_type=Path),
    help="Output file (default: write to 'ours' file for git merge driver)",
)
@click.option(
    "--path",
    "file_path",
    help="Original file path (for git merge driver %P)",
)
def merge_files(
    base: Path,
    ours: Path,
    theirs: Path,
    output: Path | None,
    file_path: str | None,
) -> None:
    """Three-way merge of Unity YAML files.

    This command is designed to work as a git merge driver.

    BASE is the common ancestor file (%O).
    OURS is the current branch version (%A).
    THEIRS is the version being merged (%B).

    Exit codes:
        0 = merge successful
        1 = conflict (manual resolution needed)

    Setup in .gitconfig:

        [merge "unity"]
            name = Unity YAML Merge
            driver = unityflow merge %O %A %B -o %A --path %P

    Setup in .gitattributes:

        *.prefab merge=unity
        *.unity merge=unity
        *.asset merge=unity
    """
    from unityflow.merge import three_way_merge

    normalizer = UnityPrefabNormalizer()

    try:
        base_content = normalizer.normalize_file(base)
        ours_content = normalizer.normalize_file(ours)
        theirs_content = normalizer.normalize_file(theirs)
    except Exception as e:
        click.echo(f"Error: Failed to normalize files: {e}", err=True)
        sys.exit(1)

    # Perform 3-way merge
    result, has_conflict = three_way_merge(base_content, ours_content, theirs_content)

    output_path = output or ours
    output_path.write_text(result, encoding="utf-8", newline="\n")

    display_path = file_path or str(output_path)

    if has_conflict:
        click.echo(f"Conflict: {display_path} (manual resolution needed)", err=True)
        sys.exit(1)
    else:
        # Silent success for git integration (git expects no output on success)
        sys.exit(0)

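# Illustrative sketch (not part of the original cli.py): three_way_merge()
# above returns a (merged_text, has_conflict) pair. The function below is a
# deliberately naive, line-based stand-in that shows the same contract; it is
# not the algorithm used by unityflow.merge (it ignores insertions/deletions
# that change line counts).
def _sketch_three_way_merge(base: str, ours: str, theirs: str) -> tuple[str, bool]:
    """Per-line merge: keep whichever side changed; flag a conflict if both did."""
    merged: list[str] = []
    has_conflict = False
    for b, o, t in zip(base.splitlines(), ours.splitlines(), theirs.splitlines()):
        if o == t or t == b:
            merged.append(o)  # identical edits, or only ours changed
        elif o == b:
            merged.append(t)  # only theirs changed
        else:
            merged.append(o)  # both sides changed differently: keep ours, flag it
            has_conflict = True
    return "\n".join(merged) + "\n", has_conflict
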
@main.command(name="setup")
@click.option(
    "--global",
    "use_global",
    is_flag=True,
    help="Configure globally (~/.gitconfig) instead of locally",
)
@click.option(
    "--with-hooks",
    is_flag=True,
    help="Also install pre-commit hooks (native git hooks)",
)
@click.option(
    "--with-pre-commit",
    is_flag=True,
    help="Also install pre-commit framework hooks",
)
@click.option(
    "--with-difftool",
    is_flag=True,
    help="Also configure git difftool for Git Fork and other GUI clients",
)
@click.option(
    "--difftool-backend",
    type=click.Choice(["vscode", "meld", "kdiff3", "opendiff", "html", "auto"]),
    default="auto",
    help="Backend for difftool (default: auto-detect)",
)
@click.option(
    "--force",
    is_flag=True,
    help="Overwrite existing configuration",
)
def setup(
    use_global: bool,
    with_hooks: bool,
    with_pre_commit: bool,
    with_difftool: bool,
    difftool_backend: str,
    force: bool,
) -> None:
    """Set up Git integration with a single command.

    Configures git diff/merge drivers and .gitattributes for Unity files.
    Run this from your Unity project root.

    Examples:

        # Basic setup (local to current repo)
        unityflow setup

        # Global setup (applies to all repos)
        unityflow setup --global

        # Setup with pre-commit hooks
        unityflow setup --with-hooks

        # Setup with pre-commit framework
        unityflow setup --with-pre-commit

        # Setup with difftool for Git Fork
        unityflow setup --with-difftool

        # Setup difftool with specific backend
        unityflow setup --with-difftool --difftool-backend vscode
    """
    import subprocess

    click.echo("=== unityflow Git Integration Setup ===")
    click.echo()

    # Check if we're in a git repo (required for local setup)
    if not use_global and not is_git_repository():
        click.echo("Error: Not in a git repository", err=True)
        click.echo("Run from your Unity project root, or use --global", err=True)
        sys.exit(1)

    repo_root = get_repo_root() if not use_global else None

    # Determine git config scope
    if use_global:
        click.echo("Setting up GLOBAL git configuration...")
        git_config_cmd = ["git", "config", "--global"]
    else:
        click.echo("Setting up LOCAL git configuration...")
        git_config_cmd = ["git", "config"]

    # Configure diff driver
    click.echo(" Configuring diff driver...")
    subprocess.run([*git_config_cmd, "diff.unity.textconv", "unityflow git-textconv"], check=True)
    subprocess.run([*git_config_cmd, "diff.unity.cachetextconv", "true"], check=True)

    # Configure merge driver
    click.echo(" Configuring merge driver...")
    merge_name = "Unity YAML Merge (unityflow)"
    merge_driver = "unityflow merge %O %A %B -o %A --path %P"
    subprocess.run([*git_config_cmd, "merge.unity.name", merge_name], check=True)
    subprocess.run([*git_config_cmd, "merge.unity.driver", merge_driver], check=True)
    subprocess.run([*git_config_cmd, "merge.unity.recursive", "binary"], check=True)

    # Configure difftool (for Git Fork and other GUI clients)
    if with_difftool:
        click.echo(" Configuring difftool...")

        # Determine backend option
        if difftool_backend == "auto":
            backend_arg = ""
        else:
            backend_arg = f" --tool {difftool_backend}"

        # Set up difftool
        subprocess.run([*git_config_cmd, "diff.tool", "prefab-unity"], check=True)
        difftool_cmd = f'unityflow difftool{backend_arg} "$LOCAL" "$REMOTE"'
        subprocess.run(
            [*git_config_cmd, "difftool.prefab-unity.cmd", difftool_cmd],
            check=True,
        )

        # Also configure for Unity file types specifically
        subprocess.run([*git_config_cmd, "difftool.prompt", "false"], check=True)

        click.echo(" Difftool configured for Git Fork and GUI clients")
        click.echo()
        click.echo(" Git Fork setup:")
        click.echo(" 1. Open Git Fork → Repository → Settings → Git Config")
        click.echo(" 2. Or use: git difftool <file>")
        click.echo()

    click.echo()

    # Setup .gitattributes (only for local setup)
    if not use_global and repo_root:
        gitattributes_path = repo_root / ".gitattributes"
        gitattributes_content = """\
# Unity YAML files - use unityflow for diff and merge
*.prefab diff=unity merge=unity text eol=lf
*.unity diff=unity merge=unity text eol=lf
*.asset diff=unity merge=unity text eol=lf
*.mat diff=unity merge=unity text eol=lf
*.controller diff=unity merge=unity text eol=lf
*.anim diff=unity merge=unity text eol=lf
*.overrideController diff=unity merge=unity text eol=lf
*.playable diff=unity merge=unity text eol=lf
*.mask diff=unity merge=unity text eol=lf
*.signal diff=unity merge=unity text eol=lf
*.renderTexture diff=unity merge=unity text eol=lf
*.flare diff=unity merge=unity text eol=lf
*.shadervariants diff=unity merge=unity text eol=lf
*.spriteatlas diff=unity merge=unity text eol=lf
*.cubemap diff=unity merge=unity text eol=lf
*.physicMaterial diff=unity merge=unity text eol=lf
*.physicsMaterial2D diff=unity merge=unity text eol=lf
*.terrainlayer diff=unity merge=unity text eol=lf
*.brush diff=unity merge=unity text eol=lf
*.mixer diff=unity merge=unity text eol=lf
*.guiskin diff=unity merge=unity text eol=lf
*.fontsettings diff=unity merge=unity text eol=lf
*.preset diff=unity merge=unity text eol=lf
*.giparams diff=unity merge=unity text eol=lf

# Unity meta files
*.meta text eol=lf
"""

        if gitattributes_path.exists():
            existing = gitattributes_path.read_text()
            if "diff=unity" in existing:
                click.echo(" .gitattributes already configured")
            else:
                click.echo(" Appending to .gitattributes...")
                with open(gitattributes_path, "a") as f:
                    f.write("\n" + gitattributes_content)
        else:
            click.echo(" Creating .gitattributes...")
            gitattributes_path.write_text(gitattributes_content)

        # Setup .gitignore for .unityflow cache directory
        gitignore_path = repo_root / ".gitignore"
        unityflow_ignore_entry = ".unityflow/"

        if gitignore_path.exists():
            existing_gitignore = gitignore_path.read_text()
            if unityflow_ignore_entry in existing_gitignore or ".unityflow" in existing_gitignore:
                click.echo(" .gitignore already includes .unityflow/")
            else:
                click.echo(" Adding .unityflow/ to .gitignore...")
                with open(gitignore_path, "a") as f:
                    f.write(f"\n# unityflow cache\n{unityflow_ignore_entry}\n")
        else:
            click.echo(" Creating .gitignore with .unityflow/...")
            gitignore_path.write_text(f"# unityflow cache\n{unityflow_ignore_entry}\n")

    # Install hooks if requested
    if with_hooks and repo_root:
        click.echo()
        click.echo("Installing git pre-commit hook...")
        hooks_dir = repo_root / ".git" / "hooks"
        hook_path = hooks_dir / "pre-commit"

        if hook_path.exists() and not force:
            click.echo(f" Warning: Hook already exists at {hook_path}", err=True)
            click.echo(" Use --force to overwrite", err=True)
        else:
            hook_content = """\
#!/bin/bash
# unityflow pre-commit hook
# Automatically normalize Unity YAML files before commit

set -e

# Get list of staged Unity files
STAGED_FILES=$(git diff --cached --name-only --diff-filter=ACM | \\
    grep -E '\\.(prefab|unity|asset)$' || true)

if [ -n "$STAGED_FILES" ]; then
    echo "Normalizing Unity files..."

    for file in $STAGED_FILES; do
        if [ -f "$file" ]; then
            unityflow normalize "$file" --in-place
            git add "$file"
        fi
    done

    echo "Unity files normalized."
fi
"""
            hook_path.write_text(hook_content)
            hook_path.chmod(0o755)
            click.echo(f" Created: {hook_path}")

    if with_pre_commit and repo_root:
        click.echo()
        click.echo("Setting up pre-commit framework...")

        # Check if pre-commit is installed
        try:
            subprocess.run(["pre-commit", "--version"], capture_output=True, check=True)
        except (subprocess.CalledProcessError, FileNotFoundError):
            click.echo(" Error: pre-commit is not installed", err=True)
            click.echo(" Install it with: pip install pre-commit", err=True)
            sys.exit(1)

        config_path = repo_root / ".pre-commit-config.yaml"
        config_content = """\
# See https://pre-commit.com for more information
repos:
  # Unity Prefab Normalizer
  - repo: https://github.com/TrueCyan/unityflow
    rev: v0.1.0
    hooks:
      - id: prefab-normalize
      # - id: prefab-validate # Optional: add validation
"""

        if config_path.exists() and not force:
            existing = config_path.read_text()
            if "unityflow" in existing:
                click.echo(" pre-commit already configured for unityflow")
            else:
                click.echo(" Warning: .pre-commit-config.yaml exists", err=True)
                click.echo(" Use --force to overwrite", err=True)
        else:
            config_path.write_text(config_content)
            click.echo(f" Created: {config_path}")

        try:
            subprocess.run(["pre-commit", "install"], cwd=repo_root, check=True)
            click.echo(" Installed pre-commit hooks")
        except subprocess.CalledProcessError:
            click.echo(" Warning: Failed to run 'pre-commit install'", err=True)

    click.echo()
    click.echo("=== Setup Complete ===")
    click.echo()
    click.echo("Git is now configured to use unityflow for Unity files.")
    click.echo()
    click.echo("Test with:")
    click.echo(" git diff HEAD~1 -- '*.prefab'")
    click.echo()

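# Illustrative sketch (not part of the original cli.py): a quick way to read
# back the entries the setup command writes above. The key names come from the
# code; the helper itself and its output format are assumptions.
def _sketch_show_git_setup(scope_global: bool = False) -> None:
    """Print the unityflow-related git config values, or <not set>."""
    import subprocess

    cmd = ["git", "config", "--global"] if scope_global else ["git", "config"]
    for key in ("diff.unity.textconv", "merge.unity.name", "merge.unity.driver"):
        result = subprocess.run([*cmd, "--get", key], capture_output=True, text=True)
        value = result.stdout.strip() if result.returncode == 0 else "<not set>"
        click.echo(f"{key} = {value}")
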
@main.command(name="hierarchy")
@click.argument("file", type=click.Path(exists=True, path_type=Path))
@click.option(
    "--depth",
    "-d",
    type=int,
    default=None,
    help="Maximum depth to display (default: unlimited)",
)
@click.option(
    "--root",
    "-r",
    "root_path",
    type=str,
    default=None,
    help="Start from a specific object path (e.g., 'Player/Body')",
)
@click.option(
    "--no-components",
    is_flag=True,
    help="Hide component information",
)
@click.option(
    "--project-root",
    type=click.Path(exists=True, path_type=Path),
    help="Unity project root (auto-detected if not specified)",
)
@click.option(
    "--format",
    "output_format",
    type=click.Choice(["tree", "json"]),
    default="tree",
    help="Output format (default: tree)",
)
def hierarchy_cmd(
    file: Path,
    depth: int | None,
    root_path: str | None,
    no_components: bool,
    project_root: Path | None,
    output_format: str,
) -> None:
    """Show hierarchy structure of a Unity YAML file.

    Displays the GameObject hierarchy in a tree format, showing:
    - Object names and parent-child relationships
    - Components attached to each object (with script names resolved)
    - Inactive objects marked with (inactive)
    - PrefabInstance nodes with their source prefab

    Examples:

        # Show full hierarchy
        unityflow hierarchy Player.prefab

        # Limit depth
        unityflow hierarchy Scene.unity --depth 2

        # Start from specific object
        unityflow hierarchy Player.prefab --root "Body/Armature"

        # Hide components
        unityflow hierarchy Player.prefab --no-components

        # Output as JSON
        unityflow hierarchy Player.prefab --format json
    """
    import json as json_module

    from unityflow import UnityYAMLDocument, build_hierarchy
    from unityflow.asset_tracker import find_unity_project_root, get_lazy_guid_index

    # Load document
    try:
        doc = UnityYAMLDocument.load_auto(file)
    except Exception as e:
        click.echo(f"Error: Failed to load file: {e}", err=True)
        sys.exit(1)

    # Find project root and build GUID index
    resolved_project_root = project_root
    if resolved_project_root is None:
        resolved_project_root = find_unity_project_root(file)

    guid_index = None
    if resolved_project_root:
        try:
            guid_index = get_lazy_guid_index(resolved_project_root, include_packages=True)
        except Exception:
            pass  # Continue without GUID index

    # Build hierarchy
    try:
        hier = build_hierarchy(doc, guid_index=guid_index)
    except Exception as e:
        click.echo(f"Error: Failed to build hierarchy: {e}", err=True)
        sys.exit(1)

    # Find starting node if root_path specified
    root_nodes = hier.root_objects
    if root_path:
        found = hier.find(root_path)
        if found is None:
            click.echo(f"Error: Object not found: {root_path}", err=True)
            sys.exit(1)
        root_nodes = [found]

    # Helper function to get active state from document
    def get_active_state(node) -> bool:
        """Get the active state of a node from the document."""
        if node._document is None:
            return True
        go_obj = node._document.get_by_file_id(node.file_id)
        if go_obj and go_obj.class_id == 1:  # GameObject
            content = go_obj.get_content()
            if content:
                return content.get("m_IsActive", 1) == 1
        return True

    # Output
    if output_format == "json":

        def node_to_dict(node, current_depth: int = 0):
            result = {
                "name": node.name,
                "path": node.path,
                "active": get_active_state(node),
            }
            if not no_components and node.components:
                result["components"] = [
                    {
                        "type": c.script_name or c.class_name,
                        "class_id": c.class_id,
                    }
                    for c in node.components
                ]
            if node.is_prefab_instance:
                result["is_prefab_instance"] = True
            if node.source_guid:
                result["source_guid"] = node.source_guid

            if depth is None or current_depth < depth:
                if node.children:
                    result["children"] = [node_to_dict(child, current_depth + 1) for child in node.children]
            return result

        output_data = [node_to_dict(n) for n in root_nodes]
        click.echo(json_module.dumps(output_data, indent=2))
    else:
        # Tree output
        def print_tree(node, prefix: str = "", is_last: bool = True, current_depth: int = 0):
            # Determine connector
            connector = "└── " if is_last else "├── "

            # Build node line
            name = node.name
            if not get_active_state(node):
                name += " (inactive)"
            if node.is_prefab_instance:
                name += " [Prefab]"

            # Component info
            comp_str = ""
            if not no_components and node.components:
                comp_names = []
                for c in node.components:
                    if c.script_name:
                        comp_names.append(c.script_name)
                    elif c.class_name and c.class_name not in ("Transform", "RectTransform"):
                        comp_names.append(c.class_name)
                if comp_names:
                    comp_str = f" [{', '.join(comp_names)}]"

            click.echo(f"{prefix}{connector}{name}{comp_str}")

            # Check depth limit
            if depth is not None and current_depth >= depth:
                return

            # Print children
            children = node.children
            child_prefix = prefix + ("    " if is_last else "│   ")
            for i, child in enumerate(children):
                print_tree(child, child_prefix, i == len(children) - 1, current_depth + 1)

        # Print header
        click.echo(f"Hierarchy: {file.name}")
        click.echo()

        # Print each root
        for i, root in enumerate(root_nodes):
            is_last_root = i == len(root_nodes) - 1
            # Root node is special - no prefix
            name = root.name
            if not get_active_state(root):
                name += " (inactive)"
            if root.is_prefab_instance:
                name += " [Prefab]"

            comp_str = ""
            if not no_components and root.components:
                comp_names = []
                for c in root.components:
                    if c.script_name:
                        comp_names.append(c.script_name)
                    elif c.class_name and c.class_name not in ("Transform", "RectTransform"):
                        comp_names.append(c.class_name)
                if comp_names:
                    comp_str = f" [{', '.join(comp_names)}]"

            click.echo(f"{name}{comp_str}")

            # Print children
            children = root.children
            for j, child in enumerate(children):
                print_tree(child, "", j == len(children) - 1, 1)

            if not is_last_root:
                click.echo()

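# Illustrative sketch (not part of the original cli.py): the hierarchy command
# above is a thin wrapper around build_hierarchy(). Using the same calls it
# makes, a minimal programmatic walk could look like this; the file path is a
# placeholder and error handling is omitted.
def _sketch_walk_hierarchy(path: str = "Player.prefab") -> None:
    """Print an indented name-only view of a prefab's GameObject tree."""
    from unityflow import UnityYAMLDocument, build_hierarchy

    doc = UnityYAMLDocument.load_auto(path)
    hier = build_hierarchy(doc, guid_index=None)

    def walk(node, indent: int = 0) -> None:
        click.echo("  " * indent + node.name)
        for child in node.children:
            walk(child, indent + 1)

    for root in hier.root_objects:
        walk(root)
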
@main.command(name="inspect")
@click.argument("file", type=click.Path(exists=True, path_type=Path))
@click.argument("object_path", type=str, required=False, default=None)
@click.option(
    "--project-root",
    type=click.Path(exists=True, path_type=Path),
    help="Unity project root (auto-detected if not specified)",
)
@click.option(
    "--format",
    "output_format",
    type=click.Choice(["text", "json"]),
    default="text",
    help="Output format (default: text)",
)
def inspect_cmd(
    file: Path,
    object_path: str | None,
    project_root: Path | None,
    output_format: str,
) -> None:
    """Inspect a GameObject or component in detail.

    Shows detailed information about a specific GameObject including:
    - Name, path, and active state
    - Layer and tag
    - All components with their properties

    If no object_path is provided, shows the root object(s).

    Examples:

        # Inspect root object
        unityflow inspect Player.prefab

        # Inspect specific object by path
        unityflow inspect Player.prefab "Body/Armature/Spine"

        # Output as JSON
        unityflow inspect Player.prefab "Canvas" --format json
    """
    import json as json_module

    from unityflow import UnityYAMLDocument, build_hierarchy
    from unityflow.asset_tracker import find_unity_project_root, get_lazy_guid_index

    # Load document
    try:
        doc = UnityYAMLDocument.load_auto(file)
    except Exception as e:
        click.echo(f"Error: Failed to load file: {e}", err=True)
        sys.exit(1)

    # Find project root and build GUID index
    resolved_project_root = project_root
    if resolved_project_root is None:
        resolved_project_root = find_unity_project_root(file)

    guid_index = None
    if resolved_project_root:
        try:
            guid_index = get_lazy_guid_index(resolved_project_root, include_packages=True)
        except Exception:
            pass

    # Build hierarchy
    try:
        hier = build_hierarchy(doc, guid_index=guid_index)
    except Exception as e:
        click.echo(f"Error: Failed to build hierarchy: {e}", err=True)
        sys.exit(1)

    # Find target node
    if object_path:
        node = hier.find(object_path)
        if node is None:
            click.echo(f"Error: Object not found: {object_path}", err=True)
            sys.exit(1)
    else:
        # Use first root
        if not hier.root_objects:
            click.echo("Error: No root objects found", err=True)
            sys.exit(1)
        node = hier.root_objects[0]

    # Get GameObject data
    go_obj = doc.get_by_file_id(node.file_id)
    go_content = go_obj.get_content() if go_obj else {}

    # Get active state from GameObject content
    is_active = go_content.get("m_IsActive", 1) == 1

    if output_format == "json":
        result = {
            "name": node.name,
            "path": node.path,
            "file_id": node.file_id,
            "active": is_active,
            "layer": go_content.get("m_Layer", 0),
            "tag": go_content.get("m_TagString", "Untagged"),
            "is_prefab_instance": node.is_prefab_instance,
        }
        if node.source_guid:
            result["source_guid"] = node.source_guid

        # Add transform info
        if node.transform_id:
            transform_obj = doc.get_by_file_id(node.transform_id)
            if transform_obj:
                transform_content = transform_obj.get_content() or {}
                result["transform"] = {
                    "type": "RectTransform" if transform_obj.class_id == 224 else "Transform",
                    "localPosition": transform_content.get("m_LocalPosition"),
                    "localRotation": transform_content.get("m_LocalRotation"),
                    "localScale": transform_content.get("m_LocalScale"),
                }
                if transform_obj.class_id == 224:
                    t = result["transform"]
                    t["anchoredPosition"] = transform_content.get("m_AnchoredPosition")
                    t["sizeDelta"] = transform_content.get("m_SizeDelta")
                    t["anchorMin"] = transform_content.get("m_AnchorMin")
                    t["anchorMax"] = transform_content.get("m_AnchorMax")
                    t["pivot"] = transform_content.get("m_Pivot")

        # Add components
        result["components"] = []
        for comp in node.components:
            comp_data = {
                "type": comp.script_name or comp.class_name,
                "class_id": comp.class_id,
                "file_id": comp.file_id,
            }
            if comp.script_guid:
                comp_data["script_guid"] = comp.script_guid
            # Include component properties
            comp_data["properties"] = comp.data
            result["components"].append(comp_data)

        click.echo(json_module.dumps(result, indent=2, default=str))
    else:
        # Text output - Inspector-like format
        click.echo(f"GameObject: {node.name}")
        click.echo(f"Path: {node.path}")
        click.echo(f"FileID: {node.file_id}")
        click.echo(f"Active: {is_active}")
        click.echo(f"Layer: {go_content.get('m_Layer', 0)}")
        click.echo(f"Tag: {go_content.get('m_TagString', 'Untagged')}")

        if node.is_prefab_instance:
            click.echo("Is Prefab Instance: Yes")
            if node.source_guid:
                click.echo(f"Source GUID: {node.source_guid}")

        click.echo()

        # Transform info
        if node.transform_id:
            transform_obj = doc.get_by_file_id(node.transform_id)
            if transform_obj:
                transform_content = transform_obj.get_content() or {}
                transform_type = "RectTransform" if transform_obj.class_id == 224 else "Transform"
                click.echo(f"[{transform_type}]")

                def fmt_vec3(v: dict, dx=0, dy=0, dz=0) -> str:
                    return f"({v.get('x', dx)}, {v.get('y', dy)}, {v.get('z', dz)})"

                def fmt_vec4(v: dict, dx=0, dy=0, dz=0, dw=1) -> str:
                    return f"({v.get('x', dx)}, {v.get('y', dy)}, {v.get('z', dz)}, {v.get('w', dw)})"

                def fmt_vec2(v: dict, dx=0, dy=0) -> str:
                    return f"({v.get('x', dx)}, {v.get('y', dy)})"

                pos = transform_content.get("m_LocalPosition", {})
                if isinstance(pos, dict):
                    click.echo(f" localPosition: {fmt_vec3(pos)}")

                rot = transform_content.get("m_LocalRotation", {})
                if isinstance(rot, dict):
                    click.echo(f" localRotation: {fmt_vec4(rot)}")

                scale = transform_content.get("m_LocalScale", {})
                if isinstance(scale, dict):
                    click.echo(f" localScale: {fmt_vec3(scale, 1, 1, 1)}")

                # RectTransform specific
                if transform_obj.class_id == 224:
                    anchor_pos = transform_content.get("m_AnchoredPosition", {})
                    if isinstance(anchor_pos, dict):
                        click.echo(f" anchoredPosition: {fmt_vec2(anchor_pos)}")

                    size = transform_content.get("m_SizeDelta", {})
                    if isinstance(size, dict):
                        click.echo(f" sizeDelta: {fmt_vec2(size)}")

                    anchor_min = transform_content.get("m_AnchorMin", {})
                    if isinstance(anchor_min, dict):
                        click.echo(f" anchorMin: {fmt_vec2(anchor_min)}")

                    anchor_max = transform_content.get("m_AnchorMax", {})
                    if isinstance(anchor_max, dict):
                        click.echo(f" anchorMax: {fmt_vec2(anchor_max)}")

                    pivot = transform_content.get("m_Pivot", {})
                    if isinstance(pivot, dict):
                        click.echo(f" pivot: {fmt_vec2(pivot, 0.5, 0.5)}")

                click.echo()

        # Other components
        for comp in node.components:
            comp_type = comp.script_name or comp.class_name
            click.echo(f"[{comp_type}]")

            if comp.script_guid:
                click.echo(f" script_guid: {comp.script_guid}")

            # Show key properties (excluding internal Unity fields)
            skip_keys = {
                "m_ObjectHideFlags",
                "m_CorrespondingSourceObject",
                "m_PrefabInstance",
                "m_PrefabAsset",
                "m_GameObject",
                "m_Enabled",
                "m_Script",
            }
            for key, value in comp.data.items():
                if key not in skip_keys:
                    # Format value for display
                    if isinstance(value, dict) and "fileID" in value:
                        # Reference field
                        file_id = value.get("fileID", 0)
                        guid = value.get("guid", "")
                        if guid:
                            click.echo(f" {key}: (GUID: {guid}, fileID: {file_id})")
                        elif file_id:
                            click.echo(f" {key}: (fileID: {file_id})")
                        else:
                            click.echo(f" {key}: None")
                    elif isinstance(value, (dict, list)):
                        # Complex value - show abbreviated
                        if isinstance(value, list):
                            click.echo(f" {key}: [{len(value)} items]")
                        else:
                            click.echo(f" {key}: {{...}}")
                    else:
                        click.echo(f" {key}: {value}")

            click.echo()


if __name__ == "__main__":
    main()
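# Illustrative sketch (not part of the original cli.py): besides the console
# entry point above, the click group can be driven in-process with click's
# standard CliRunner test utility; the file path is a placeholder.
#
#     from click.testing import CliRunner
#     from unityflow.cli import main
#
#     runner = CliRunner()
#     result = runner.invoke(main, ["hierarchy", "Player.prefab", "--format", "json"])
#     print(result.output)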