wizard-codegen 0.1.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cli/__init__.py +28 -0
- cli/main.py +155 -0
- core/__init__.py +22 -0
- core/config.py +123 -0
- core/context_builder.py +321 -0
- core/filter.py +65 -0
- core/renderer.py +77 -0
- core/writer.py +70 -0
- hooks/__init__.py +6 -0
- hooks/hooks.py +28 -0
- proto/__init__.py +19 -0
- proto/discover.py +70 -0
- proto/fds_loader.py +51 -0
- proto/proto_source.py +119 -0
- proto/protoc_runner.py +55 -0
- utils/__init__.py +17 -0
- utils/name.py +69 -0
- wizard_codegen-0.1.5.dist-info/METADATA +1068 -0
- wizard_codegen-0.1.5.dist-info/RECORD +21 -0
- wizard_codegen-0.1.5.dist-info/WHEEL +4 -0
- wizard_codegen-0.1.5.dist-info/entry_points.txt +2 -0
cli/__init__.py
ADDED
|
@@ -0,0 +1,28 @@
|
|
|
1
|
+
"""Compatibility shim: re-export the public CLI surface from ``cli.main``."""

# Re-export everything from main for backwards compatibility
# (underscore-prefixed helpers are re-exported deliberately; older callers
#  imported them from this package directly).
from cli.main import (
    app,
    console,
    Ctx,
    main,
    common,
    generate,
    list_protos,
    validate,
    _print_files_table,
    _print_verbose_enabled,
    _load_config,
)

__all__ = [
    "app",
    "console",
    "Ctx",
    "main",
    "common",
    "generate",
    "list_protos",
    "validate",
    "_print_files_table",
    "_print_verbose_enabled",
    "_load_config",
]
|
cli/main.py
ADDED
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import typer
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
from rich.console import Console
|
|
6
|
+
from rich.table import Table
|
|
7
|
+
from rich.panel import Panel
|
|
8
|
+
from rich.pretty import Pretty
|
|
9
|
+
from dataclasses import dataclass
|
|
10
|
+
from importlib.metadata import version as get_version
|
|
11
|
+
from typing import Optional
|
|
12
|
+
|
|
13
|
+
from core import *
|
|
14
|
+
from proto import *
|
|
15
|
+
import shutil
|
|
16
|
+
|
|
17
|
+
# CLI root application; shows help when invoked with no arguments.
app = typer.Typer(no_args_is_help=True, rich_markup_mode="rich")
# Shared rich console used by every command for output.
console = Console()
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def version_callback(value: bool) -> None:
    """Eager ``--version`` callback: print the installed version and exit."""
    if value:
        # Read the version from installed package metadata, not a hard-coded string.
        ver = get_version("wizard-codegen")
        console.print(f"wizard-codegen [bold cyan]{ver}[/]")
        raise typer.Exit()
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class Ctx:
    """Shared CLI state, stored on ``typer.Context.obj`` by the root callback."""

    # Emit extra diagnostic output when True.
    verbose: bool = False
    # Report planned actions without changing any files.
    dry_run: bool = False
    # Use the local proto.root instead of the git source.
    local: bool = False
    # Location of the codegen YAML configuration file.
    config_path: Path = Path("wizard/codegen.yaml")
|
|
34
|
+
|
|
35
|
+
|
|
36
|
+
@app.callback()
def common(
    ctx: typer.Context,
    version: Optional[bool] = typer.Option(
        None, "--version", "-V",
        callback=version_callback,
        is_eager=True,
        help="Show version and exit",
    ),
    verbose: bool = typer.Option(False, "--verbose", "-v", help="More logs"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Print actions without changing files"),
    local: bool = typer.Option(False, "--local", "-l", help="Use local proto.root instead of git source"),
    config_path: Path = typer.Option(
        Path("wizard/codegen.yaml"),
        "--config", "-c",
        exists=True,
        file_okay=True,
        dir_okay=False,
        readable=True,
        help="Path to codegen config YAML",
    ),
) -> None:
    """Root callback: parse global options and stash them on ``ctx.obj``.

    Every subcommand reads its flags from the ``Ctx`` instance built here.
    ``--config`` is validated by typer (must exist and be a readable file).
    """
    ctx.obj = Ctx(verbose=verbose, dry_run=dry_run, local=local, config_path=config_path)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
@app.command(help="Generate code from protos")
def generate(ctx: typer.Context):
    """Render all configured templates from the proto descriptors.

    Pipeline: load config -> resolve proto root -> discover .proto files ->
    build a FileDescriptorSet (protoc) -> build the Jinja2 context ->
    render every target -> apply the resulting write plan.
    """
    _print_verbose_enabled(ctx)
    config = _load_config(ctx.obj.config_path, ctx)
    proto_root = resolve_proto_root(config, use_local=ctx.obj.local)
    files = discover_proto_files(proto_root, config)
    if ctx.obj.verbose:
        _print_files_table(files, proto_root)

    fds_path, tmp_dir = build_descriptor_set(config, proto_root, files, ctx.obj.verbose)
    try:
        print_fds_content(fds_path, ctx, console)
        fds = load_fds(fds_path)
        jinja_ctx = build_context(config, fds)
        print_build_context(jinja_ctx, ctx, console)
        plan = render_all(config, jinja_ctx)

        apply_plan(plan, ctx.obj.dry_run, ctx.obj.verbose)
    finally:
        # Cleanup Tmp Dir — always, even when a pipeline step above raises,
        # so failed runs do not leak temporary descriptor directories.
        if tmp_dir:
            shutil.rmtree(tmp_dir, ignore_errors=True)

    if ctx.obj.dry_run:
        console.print("[bold green]✓[/] Dry run finished")
    else:
        console.print("[bold green]✓[/] Generated code")
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
@app.command("list-protos", help="List available protos")
def list_protos(ctx: typer.Context):
    """Print a table of all discoverable .proto files under the proto root."""
    _print_verbose_enabled(ctx)
    config = _load_config(ctx.obj.config_path, ctx)
    proto_root = resolve_proto_root(config, use_local=ctx.obj.local)
    files = discover_proto_files(proto_root, config)
    _print_files_table(files, proto_root)
|
|
96
|
+
|
|
97
|
+
|
|
98
|
+
@app.command(help="Validate the jinja2 templates, resolves proto root (git checkout), runs protoc descriptor build if needed, prints missing filters/variables")
def validate(ctx: typer.Context):
    """Run the full pipeline up to rendering, without writing any output.

    Resolves the proto root, builds the descriptor set, and renders every
    template so missing filters/variables surface as errors.
    """
    _print_verbose_enabled(ctx)
    config = _load_config(ctx.obj.config_path, ctx)
    proto_root = resolve_proto_root(config, use_local=ctx.obj.local)
    files = discover_proto_files(proto_root, config)

    if ctx.obj.verbose:
        _print_files_table(files, proto_root)

    fds_path, tmp_dir = build_descriptor_set(config, proto_root, files, ctx.obj.verbose)
    try:
        print_fds_content(fds_path, ctx, console)
        fds = load_fds(fds_path)
        jinja_ctx = build_context(config, fds)
        print_build_context(jinja_ctx, ctx, console)
        render_all(config, jinja_ctx)
    finally:
        # Cleanup Tmp Dir — always, even when rendering raises, so a failed
        # validation run does not leak the temporary descriptor directory.
        if tmp_dir:
            shutil.rmtree(tmp_dir, ignore_errors=True)

    console.print("[bold green]✓[/] Validation successful")
|
|
120
|
+
|
|
121
|
+
|
|
122
|
+
def _print_files_table(files, proto_root):
    """Show the discovered proto files as a two-column rich table."""
    table = Table(title="Available proto schemas")
    table.add_column("Name", style="bold")
    table.add_column("Path", style="dim")
    for proto_file in files:
        relative = proto_file.relative_to(proto_root).as_posix()
        table.add_row(proto_file.stem, relative)
    console.print(table)
|
|
129
|
+
|
|
130
|
+
|
|
131
|
+
def _print_verbose_enabled(ctx: typer.Context) -> None:
    """Announce once, at command start, that verbose output is active."""
    if ctx.obj.verbose:
        console.print("[dim]Verbose enabled[/]")
|
|
134
|
+
|
|
135
|
+
def _load_config(path: Path, ctx: typer.Context) -> CodegenConfig:
    """Load the codegen config, pretty-printing it when --verbose is set."""
    config = load_config(path, console)
    if ctx.obj.verbose:
        console.print(
            Panel(
                Pretty(
                    # pydantic v2
                    config.model_dump(mode="python"),
                    expand_all=True,
                ),
                title=f"[bold]Loaded config[/] [dim]{path}[/]",
                border_style="cyan",
            )
        )
    return config
|
|
150
|
+
|
|
151
|
+
def main():
    """Console-script entry point: dispatch to the typer application."""
    app()

if __name__ == "__main__":
    main()
|
core/__init__.py
ADDED
|
@@ -0,0 +1,22 @@
|
|
|
1
|
+
"""
Core functionality for wizard code generation.

Contains the main business logic for configuration, rendering, and writing files.
"""

from .config import CodegenConfig, load_config
from .renderer import render_all, PlanItem
from .writer import apply_plan
from .context_builder import build_context, print_build_context
from .filter import where_ok

# Public API of the core package (also what `from core import *` exposes).
__all__ = [
    "CodegenConfig",
    "load_config",
    "render_all",
    "PlanItem",
    "apply_plan",
    "build_context",
    "print_build_context",
    "where_ok"
]
|
core/config.py
ADDED
|
@@ -0,0 +1,123 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import typer
|
|
4
|
+
import yaml
|
|
5
|
+
from pathlib import Path
|
|
6
|
+
from typing import Any
|
|
7
|
+
from enum import Enum
|
|
8
|
+
from pydantic import BaseModel, ConfigDict, Field, ValidationError
|
|
9
|
+
from rich.console import Console
|
|
10
|
+
from rich.panel import Panel
|
|
11
|
+
|
|
12
|
+
class ForEachMode(str, Enum):
    """Granularity at which a render target iterates over the descriptor set."""
    FILE = "file"
    MESSAGE = "message"
    ENUM = "enum"
    SERVICE = "service"
|
|
17
|
+
|
|
18
|
+
class WriteMode(str, Enum):
    """How rendered output is written when the destination file exists."""
    OVERWRITE = "overwrite"
    APPEND = "append"
    WRITE_ONCE = "write-once"
|
|
22
|
+
|
|
23
|
+
class EqualsKV(BaseModel):
    """A key/value pair used by the ``option.equals`` predicate."""
    key: str
    value: Any
|
|
26
|
+
|
|
27
|
+
# A single predicate
class Predicate(BaseModel):
    """One filter condition; unset fields do not participate in matching."""
    # dotted selector like "name", "package", "file", "full_name"
    # or "option" for descriptor options/custom options you extract into context
    name: str | None = None
    package: str | None = None
    file: str | None = None
    full_name: str | None = None
    # Spelled "option.equals" in YAML; accepted by field name too (see model_config).
    option_equals: EqualsKV | None = Field(default=None, alias="option.equals")

    model_config = {"populate_by_name": True}
|
|
38
|
+
|
|
39
|
+
class Where(BaseModel):
    """Boolean combination of predicates attached to a render target."""
    all: list[Predicate] = Field(default_factory=list)  # AND
    any: list[Predicate] = Field(default_factory=list)  # OR
    not_: list[Predicate] = Field(default_factory=list, alias="not")  # NOT

    model_config = {"populate_by_name": True}
|
|
45
|
+
|
|
46
|
+
class RenderTarget(BaseModel):
    """A single template -> output mapping within a target."""
    template: str  # template to render
    output: str    # destination path for the rendered output
    mode: WriteMode = WriteMode.OVERWRITE
    for_each: ForEachMode | None = None  # iteration granularity; None renders once
    where: Where | None = None           # optional predicate filter
|
|
52
|
+
|
|
53
|
+
class ProtoSource(BaseModel):
    """Remote (git) origin of the proto files."""
    git: str
    ref: str = "latest-tag"  # Default to latest semver tag
    # Optional descriptor-set path — presumably a pre-built FDS; TODO confirm.
    fds: str | None = None
    include_info: bool = True
|
|
58
|
+
|
|
59
|
+
class ProtoConfig(BaseModel):
    """Where protos come from and which files/includes to compile."""
    cache_dir: str
    includes: list[str] = Field(default_factory=list)
    files: list[str] = Field(default_factory=list)
    root: str | None = ""  # local proto root; used with the --local flag
    source: ProtoSource | None = None
|
|
65
|
+
|
|
66
|
+
class TargetConfig(BaseModel):
    """One generation target: template dir, output dir, and render entries."""
    templates: str
    out: str
    render: list[RenderTarget] = Field(default_factory=list)
|
|
70
|
+
|
|
71
|
+
class HooksConfig(BaseModel):
    """Location of optional user hook code."""
    root: str = "wizard"
    module: str | None = None
|
|
74
|
+
|
|
75
|
+
class CodegenConfig(BaseModel):
    """Top-level schema of the codegen YAML file."""
    # nice to have: forbid typos in YAML keys
    model_config = ConfigDict(extra="forbid")

    proto: ProtoConfig
    targets: dict[str, TargetConfig]
    hooks: HooksConfig = Field(default_factory=HooksConfig)
|
|
82
|
+
|
|
83
|
+
def _format_pydantic_errors(e: ValidationError) -> str:
|
|
84
|
+
lines: list[str] = []
|
|
85
|
+
for err in e.errors():
|
|
86
|
+
loc = ".".join(str(x) for x in err.get("loc", [])) or "<root>"
|
|
87
|
+
msg = err.get("msg", "Invalid value")
|
|
88
|
+
lines.append(f"• {loc}: {msg}")
|
|
89
|
+
return "\n".join(lines)
|
|
90
|
+
|
|
91
|
+
def _die_config(message: str, console: Console, code: int = 2,) -> typer.Never:
    """Print *message* in a red "Config error" panel and exit with *code*."""
    console.print(Panel(message, title="[bold red]Config error[/]", border_style="red"))
    raise typer.Exit(code=code)
|
|
94
|
+
|
|
95
|
+
def load_config(path: Path, console: Console) -> CodegenConfig:
    """Read, parse, and validate the codegen YAML config.

    Exits the CLI (via ``typer.Exit``, raised by ``_die_config``) with a rich
    error panel on any failure: missing/unreadable file, invalid YAML, or a
    schema validation error.
    """
    # 1) file read
    try:
        raw = path.read_text()
    except FileNotFoundError:
        _die_config(f"[bold]{path}[/] not found.\n\nPass a config with [cyan]--config[/].", console)
    except OSError as e:
        _die_config(f"Could not read [bold]{path}[/]: {e}", console)

    # 2) YAML parse
    try:
        # An empty YAML document parses to None; normalize to an empty dict.
        data: dict[str, Any] = yaml.safe_load(raw) or {}
    except yaml.YAMLError as e:
        _die_config(f"Invalid YAML in [bold]{path}[/]:\n\n[dim]{e}[/]", console)

    # 3) Pydantic validate
    try:
        return CodegenConfig.model_validate(data)
    except ValidationError as e:
        problems = _format_pydantic_errors(e)
        hint = (
            "[bold]Expected top-level keys[/]\n"
            "• proto\n"
            "• targets\n"
            "• hooks (optional)\n\n"
            "[bold]Problems[/]\n"
            f"{problems}"
        )
        _die_config(f"In [bold]{path}[/]:\n\n{hint}", console)
|
core/context_builder.py
ADDED
|
@@ -0,0 +1,321 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from collections import defaultdict, deque
|
|
4
|
+
from typing import Any, Dict, List
|
|
5
|
+
|
|
6
|
+
import typer
|
|
7
|
+
from google.protobuf import descriptor_pb2
|
|
8
|
+
from rich.console import Console
|
|
9
|
+
from rich.panel import Panel
|
|
10
|
+
from rich.pretty import Pretty
|
|
11
|
+
|
|
12
|
+
from .config import CodegenConfig
|
|
13
|
+
from utils import Name
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
def topo_order(files_by_name: dict[str, descriptor_pb2.FileDescriptorProto]) -> list[str]:
    """Topologically sort proto files based on their dependencies.

    Uses Kahn's algorithm over dependency edges restricted to files actually
    present in *files_by_name*, so dependencies come before dependents.

    Raises:
        ValueError: if the dependency graph contains a cycle. (Previously,
            files caught in a cycle were silently dropped from the result,
            which would make them vanish from the generated context.)
    """
    deps = {n: [d for d in f.dependency if d in files_by_name] for n, f in files_by_name.items()}
    indeg = {n: 0 for n in files_by_name}
    rev = defaultdict(list)

    for n, ds in deps.items():
        for d in ds:
            indeg[n] += 1
            rev[d].append(n)

    q = deque([n for n, deg in indeg.items() if deg == 0])
    out = []
    while q:
        n = q.popleft()
        out.append(n)
        for m in rev[n]:
            indeg[m] -= 1
            if indeg[m] == 0:
                q.append(m)

    if len(out) != len(files_by_name):
        cyclic = sorted(set(files_by_name) - set(out))
        raise ValueError(f"Dependency cycle among proto files: {cyclic}")
    return out
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def _pkg_path(pkg: str) -> str:
|
|
40
|
+
"""Convert package name to path format."""
|
|
41
|
+
return pkg.replace(".", "/") if pkg else ""
|
|
42
|
+
|
|
43
|
+
|
|
44
|
+
def _build_full_name(package: str, name: str) -> str:
|
|
45
|
+
"""Build fully qualified name for a proto type."""
|
|
46
|
+
return f".{package}.{name}" if package else f".{name}"
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _build_field_data(field: Any) -> Dict[str, Any]:
    """Build field data dictionary from protobuf field descriptor.

    ``label`` and ``type`` are coerced to plain ints (their enum values);
    the raw descriptor is kept under "field" for templates needing full access.
    """
    return {
        "name": Name(field.name),
        "number": field.number,
        "label": int(field.label),
        "type": int(field.type),
        "type_name": field.type_name,
        "json_name": field.json_name,
        "field": field
    }
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def _build_method_data(method: Any) -> Dict[str, Any]:
    """Build method data dictionary from protobuf method descriptor.

    Streaming flags are coerced to plain bools; raw options pass through
    for templates that inspect custom method options.
    """
    return {
        "name": Name(method.name),
        "input_type": method.input_type,
        "output_type": method.output_type,
        "client_streaming": bool(method.client_streaming),
        "server_streaming": bool(method.server_streaming),
        "options": method.options,
    }
|
|
72
|
+
|
|
73
|
+
|
|
74
|
+
def _build_enum_value_data(value: Any) -> Dict[str, Any]:
|
|
75
|
+
"""Build enum value data dictionary from protobuf enum value descriptor."""
|
|
76
|
+
return {"name": value.name, "number": value.number}
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
def _update_type_index(type_index: Dict[str, Any], full_name: str, kind: str, file_name: str, package: str) -> None:
|
|
80
|
+
"""Update the type index with a new type entry."""
|
|
81
|
+
type_index[full_name] = {"kind": kind, "file": file_name, "package": package}
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
def _process_nested_enum(
    enum: Any,
    parent_full_name: str,
    file_name: str,
    package: str,
    type_index: Dict[str, Any],
    enum_index: Dict[str, Any]
) -> Dict[str, Any]:
    """Process a nested enum within a message.

    Side effects: registers the enum in both *type_index* and *enum_index*
    under its dotted full name. Returns the enum's context dict.
    """
    full_name = f"{parent_full_name}.{enum.name}"
    enum_values = [_build_enum_value_data(value) for value in enum.value]

    enum_data = {
        "name": Name(enum.name),
        "full_name": full_name,
        "file": file_name,
        "package": package,
        "enum_values": enum_values,  # Use enum_values to avoid conflict with dict.values()
    }

    _update_type_index(type_index, full_name, "enum", file_name, package)
    enum_index[full_name] = enum_data

    return enum_data
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def _process_message_recursive(
    message: Any,
    parent_full_name: str,
    file_name: str,
    package: str,
    type_index: Dict[str, Any],
    message_index: Dict[str, Any],
    enum_index: Dict[str, Any]
) -> Dict[str, Any]:
    """Process a message and its nested types recursively.

    *parent_full_name* is "" for top-level messages; nested messages and
    enums are qualified under their parent's full name. Side effects:
    registers the message (and every nested type) in the shared indexes.
    Returns the message's context dict.
    """
    # Build full name for this message
    if parent_full_name:
        full_name = f"{parent_full_name}.{message.name}"
    else:
        full_name = _build_full_name(package, message.name)

    fields = [_build_field_data(field) for field in message.field]

    # Process nested messages recursively
    nested_messages = []
    for nested_msg in message.nested_type:
        nested_data = _process_message_recursive(
            nested_msg,
            full_name,
            file_name,
            package,
            type_index,
            message_index,
            enum_index
        )
        nested_messages.append(nested_data)

    # Process nested enums
    nested_enums = []
    for nested_enum in message.enum_type:
        nested_enum_data = _process_nested_enum(
            nested_enum,
            full_name,
            file_name,
            package,
            type_index,
            enum_index
        )
        nested_enums.append(nested_enum_data)

    message_data = {
        "name": Name(message.name),
        "full_name": full_name,
        "fields": fields,
        "file": file_name,
        "package": package,
        "nested_messages": nested_messages,
        "nested_enums": nested_enums,
    }

    _update_type_index(type_index, full_name, "message", file_name, package)
    message_index[full_name] = message_data

    return message_data
|
|
169
|
+
|
|
170
|
+
|
|
171
|
+
def _process_messages(
    proto_file: Any,
    file_name: str,
    package: str,
    type_index: Dict[str, Any],
    message_index: Dict[str, Any],
    enum_index: Dict[str, Any]
) -> List[Dict[str, Any]]:
    """Process all messages in a proto file, including nested types."""
    # Empty parent name marks each message as top-level.
    return [
        _process_message_recursive(
            message, "", file_name, package, type_index, message_index, enum_index
        )
        for message in proto_file.message_type
    ]
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def _process_enums(
    proto_file: Any,
    file_name: str,
    package: str,
    type_index: Dict[str, Any],
    enum_index: Dict[str, Any]
) -> List[Dict[str, Any]]:
    """Process all top-level enums in a proto file.

    Side effects: registers each enum in *type_index* and *enum_index*.
    """
    enums = []

    for enum in proto_file.enum_type:
        full_name = _build_full_name(package, enum.name)
        enum_values = [_build_enum_value_data(value) for value in enum.value]

        enum_data = {
            "name": Name(enum.name),
            "full_name": full_name,
            "file": file_name,
            "package": package,
            "enum_values": enum_values,  # Use enum_values to avoid conflict with dict.values()
        }

        enums.append(enum_data)
        _update_type_index(type_index, full_name, "enum", file_name, package)
        enum_index[full_name] = enum_data

    return enums
|
|
224
|
+
|
|
225
|
+
|
|
226
|
+
def _process_services(
    proto_file: Any,
    file_name: str,
    package: str,
    service_index: Dict[str, Any]
) -> List[Dict[str, Any]]:
    """Process all services in a proto file.

    Side effects: registers each service in *service_index* by full name.
    (Services are not added to the type index, unlike messages and enums.)
    """
    services = []

    for service in proto_file.service:
        full_name = _build_full_name(package, service.name)
        methods = [_build_method_data(method) for method in service.method]

        service_data = {
            "name": Name(service.name),
            "methods": methods,
            "file": file_name,
            "package": package
        }

        services.append(service_data)
        service_index[full_name] = service_data

    return services
|
|
250
|
+
|
|
251
|
+
|
|
252
|
+
def _process_proto_file(
    proto_file: Any,
    file_name: str,
    type_index: Dict[str, Any],
    message_index: Dict[str, Any],
    enum_index: Dict[str, Any],
    service_index: Dict[str, Any]
) -> Dict[str, Any]:
    """Process a single proto file and return its context data.

    Side effects: populates the shared type/message/enum/service indexes
    via the per-kind processors.
    """
    package = proto_file.package
    # File name without directory or .proto extension.
    base_name = Path(file_name).stem

    messages = _process_messages(proto_file, file_name, package, type_index, message_index, enum_index)
    enums = _process_enums(proto_file, file_name, package, type_index, enum_index)
    services = _process_services(proto_file, file_name, package, service_index)

    return {
        "name": Name(file_name),
        "basename": base_name,
        "package": package,
        "package_path": _pkg_path(package),
        "imports": list(proto_file.dependency),
        "messages": messages,
        "enums": enums,
        "services": services,
        "options": proto_file.options,
    }
|
|
279
|
+
|
|
280
|
+
|
|
281
|
+
def build_context(cfg: CodegenConfig, fds: descriptor_pb2.FileDescriptorSet) -> Dict[str, Any]:
    """Build the complete context for code generation from proto file descriptors.

    Files are processed in topological (dependency) order so referenced
    types are indexed before their dependents. Returns the Jinja2 root
    context: per-file data plus global type/message/enum/service indexes.
    """
    files_by_name = {f.name: f for f in fds.file}
    ordered_files = topo_order(files_by_name)

    # Initialize indexes (shared, mutated by the per-file processors)
    type_index = {}
    message_index = {}
    enum_index = {}
    service_index = {}
    ctx_files = []

    # Process each file in topological order
    for file_name in ordered_files:
        proto_file = files_by_name[file_name]
        file_context = _process_proto_file(
            proto_file, file_name, type_index, message_index, enum_index, service_index
        )
        ctx_files.append(file_context)

    return {
        "proto_root": cfg.proto.root,
        "files": ctx_files,
        "types": type_index,
        "message": message_index,
        "enum": enum_index,
        "service": service_index,
    }
|
|
309
|
+
|
|
310
|
+
|
|
311
|
+
def print_build_context(ctx: Dict[str, Any], typer_ctx: typer.Context, console: Console,
                        title: str = "Jinja2 context") -> None:
    """Print the build context if verbose mode is enabled."""
    # Only dump the (potentially very large) context when --verbose was passed.
    if typer_ctx.obj.verbose:
        console.print(
            Panel(
                Pretty(ctx, expand_all=True),
                title=f"[bold]{title}[/]",
                border_style="cyan",
            )
        )