wizard-codegen 0.1.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
core/filter.py ADDED
@@ -0,0 +1,65 @@
1
+ from __future__ import annotations
2
+ import re
3
+ from typing import Any
4
+ from .config import Where, Predicate
5
+ import fnmatch
6
+
7
+ def _get_opt(item: dict, key: str) -> Any:
8
+ # expect custom options extracted into item["options"] as dict
9
+ return (item.get("options") or {}).get(key)
10
+
11
+ def _match_regex(value: str | None, pattern: str) -> bool:
12
+ if value is None:
13
+ return False
14
+
15
+ # Treat patterns containing glob chars as glob
16
+ if any(ch in pattern for ch in ["*", "?", "[", "]"]):
17
+ return fnmatch.fnmatch(value, pattern)
18
+
19
+ return re.search(pattern, value) is not None
20
+
21
def _pred_ok(item: dict, p: Predicate) -> bool:
    """True when item satisfies every field set on predicate p.

    Unset predicate fields are treated as "match anything"; values are
    only extracted from the item when the corresponding pattern is set.
    """
    checks = (
        (p.name, lambda: _name_raw(item)),
        (p.package, lambda: item.get("package")),
        (p.file, lambda: item.get("file")),
        (p.full_name, lambda: item.get("full_name")),
    )
    for pattern, get_value in checks:
        if pattern and not _match_regex(get_value(), pattern):
            return False

    if p.option_equals:
        if _get_opt(item, p.option_equals.key) != p.option_equals.value:
            return False

    return True
34
+
35
def where_ok(item: dict, where: Where | None) -> bool:
    """Evaluate a Where clause against one item.

    Semantics:
      - not_: a match on ANY of these predicates rejects the item.
      - all:  every predicate must match (AND).
      - any:  when non-empty, at least one predicate must match (OR).
    A missing clause accepts everything.
    """
    if where is None:
        return True

    # NOT: any matching predicate rejects the item.
    if any(_pred_ok(item, p) for p in where.not_):
        return False

    # ALL (AND): every predicate must hold.
    if not all(_pred_ok(item, p) for p in where.all):
        return False

    # ANY (OR): when present, at least one predicate must hold.
    if where.any and not any(_pred_ok(item, p) for p in where.any):
        return False

    return True
55
+
56
+ def _name_raw(item: dict) -> str | None:
57
+ n = item.get("name")
58
+ if n is None:
59
+ return None
60
+ if isinstance(n, str):
61
+ return n
62
+ if isinstance(n, dict):
63
+ return n.get("raw")
64
+ # dataclass / pydantic / simple object
65
+ return getattr(n, "raw", str(n))
core/renderer.py ADDED
@@ -0,0 +1,77 @@
1
+ from __future__ import annotations
2
+ from dataclasses import dataclass
3
+ from pathlib import Path
4
+ from jinja2 import Environment, FileSystemLoader, StrictUndefined
5
+
6
+ from .config import CodegenConfig
7
+ from hooks import load_hooks
8
+ from .filter import where_ok
9
+
10
@dataclass
class PlanItem:
    """A single planned file write produced by rendering one template."""
    # Destination path for the rendered content (out_root / rendered output path).
    output_path: Path
    # Fully rendered file content.
    content: str
    # Write strategy consumed by the writer: "overwrite" (default),
    # "append", or "write-once".
    mode: str = "overwrite"
15
+
16
def _make_env(template_dir: Path) -> Environment:
    """Build the Jinja environment used to render templates from template_dir.

    StrictUndefined makes a missing context variable raise an error instead
    of silently rendering an empty string; trim_blocks/lstrip_blocks keep
    block tags from leaking whitespace into the output.
    """
    env = Environment(
        loader=FileSystemLoader(str(template_dir)),
        undefined=StrictUndefined,
        trim_blocks=True,
        lstrip_blocks=True,
    )
    # add a couple generic filters
    # NOTE(review): this shadows Jinja's built-in `replace` filter (which also
    # accepts an optional count argument) with a 2-argument version that
    # stringifies its input — confirm that is intentional.
    env.filters["replace"] = lambda s, a, b: str(s).replace(a, b)
    return env
26
+
27
+ def _get_items_for_each_type(context: dict, for_each_type: str):
28
+ """Get the appropriate items to iterate over based on for_each type."""
29
+ if for_each_type == "file":
30
+ return context["files"]
31
+ elif for_each_type == "message":
32
+ return context["message"].values()
33
+ elif for_each_type == "enum":
34
+ return context["enum"].values()
35
+ elif for_each_type == "service":
36
+ return context["service"].values()
37
+ else:
38
+ raise ValueError(f"Unknown for_each type: {for_each_type}")
39
+
40
def _render_item(item, context: dict, tpl, out_tpl, out_root: Path, mode: str) -> PlanItem:
    """Render a single item and return a PlanItem.

    The shared context is copied and extended with "item" so per-item
    rendering never mutates the caller's context.
    """
    local_ctx = {**context, "item": item}
    return PlanItem(
        output_path=out_root / Path(out_tpl.render(**local_ctx)),
        content=tpl.render(**local_ctx),
        mode=mode,
    )
47
+
48
def render_all(cfg: CodegenConfig, context: dict, out_override: Path | None = None) -> list[PlanItem]:
    """Build the render plan for every configured target.

    For each target, creates a Jinja environment over its template dir,
    registers hooks (if configured), and renders each entry point either
    once (no for_each) or once per item passing its `where` filter.

    out_override, when given, replaces every target's configured output root.
    Returns the flat list of PlanItems; nothing is written to disk here.
    """
    plan: list[PlanItem] = []

    # Hooks depend only on the config, not on the target: load the module
    # once instead of re-importing (and re-inserting into sys.path) for
    # every target in the loop.
    hooks = load_hooks(cfg, Path(cfg.hooks.root))

    for target, tcfg in cfg.targets.items():
        template_dir = Path(tcfg.templates).resolve()
        out_root = (out_override or Path(tcfg.out)).resolve()

        env = _make_env(template_dir)
        if hooks:
            # Registration stays per-target: each environment is fresh.
            hooks.register(env, target=target, config=cfg)

        for ep in tcfg.render:
            tpl = env.get_template(ep.template)
            out_tpl = env.from_string(ep.output)

            if ep.for_each:
                items = _get_items_for_each_type(context, ep.for_each)
                for item in items:
                    if not where_ok(item, ep.where):
                        continue
                    plan.append(_render_item(item, context, tpl, out_tpl, out_root, ep.mode))
            else:
                rel_out = Path(out_tpl.render(**context))
                content = tpl.render(**context)
                plan.append(PlanItem(output_path=out_root / rel_out, content=content, mode=ep.mode))

    return plan
core/writer.py ADDED
@@ -0,0 +1,70 @@
1
+ from __future__ import annotations
2
+
3
+ import hashlib
4
+ from typing import Literal
5
+
6
+ from .renderer import PlanItem
7
+
8
+
9
+ def _sha256(s: str) -> str:
10
+ return hashlib.sha256(s.encode("utf-8")).hexdigest()
11
+
12
+
13
def apply_plan(plan: list[PlanItem], dry_run: bool, verbose: bool):
    """Write every PlanItem to disk according to its mode.

    Modes:
      - "overwrite":  write the file, skipping when content is unchanged.
      - "append":     append the content, skipping when it is already the
                      tail of the file; a newline separator is inserted when
                      needed.
      - "write-once": write only when the file does not exist yet; an
                      existing file is never touched.

    When dry_run is True nothing is written or created; the intended action
    is printed instead. Raises ValueError for an unknown mode.
    """
    for item in plan:
        mode: Literal["overwrite", "append", "write-once"] = getattr(item, "mode", "overwrite")

        # write-once: if the file exists, never touch it
        if mode == "write-once" and item.output_path.exists():
            if verbose:
                print(f"skip (write-once exists): {item.output_path}")
            continue

        old_text = ""
        if item.output_path.exists():
            old_text = item.output_path.read_text(encoding="utf-8")

        if mode == "overwrite":
            # Skip rewriting identical content (keeps mtimes stable).
            if old_text == item.content:
                if verbose:
                    print(f"skip (unchanged): {item.output_path}")
                continue
        elif mode == "append":
            # guard against duplicating the same block
            if old_text.endswith(item.content):
                if verbose:
                    print(f"skip (already appended): {item.output_path}")
                continue
        elif mode != "write-once":
            raise ValueError(f"Unknown mode {mode!r} for {item.output_path}")
        # BUGFIX: "write-once" with a missing file used to `continue` here and
        # was never written; it now falls through to a plain write.

        appending = mode == "append" and item.output_path.exists()
        action = "append" if appending else "write"

        if dry_run:
            print(f"would {action}: {item.output_path}")
            continue

        # Create parent dirs only when we are actually going to write
        # (previously this ran even for skipped/dry-run items).
        item.output_path.parent.mkdir(parents=True, exist_ok=True)

        if appending:
            # ensure we start on a new line
            prefix = "" if old_text.endswith("\n") or item.content.startswith("\n") else "\n"
            item.output_path.write_text(old_text + prefix + item.content, encoding="utf-8")
        else:
            # overwrite + write-once (new file) both come here
            item.output_path.write_text(item.content, encoding="utf-8")

        if verbose:
            # proper past tense (was f"{action}d" -> "appendd"/"writed")
            print(f"{'appended' if appending else 'wrote'}: {item.output_path}")
hooks/__init__.py ADDED
@@ -0,0 +1,6 @@
1
+ from .hooks import load_hooks, HooksModule
2
+
3
+ __all__ = [
4
+ "load_hooks",
5
+ "HooksModule"
6
+ ]
hooks/hooks.py ADDED
@@ -0,0 +1,28 @@
1
+ import importlib
2
+ from pathlib import Path
3
+ from typing import Protocol, runtime_checkable
4
+
5
+ from jinja2 import Environment
6
+ from core import CodegenConfig
7
+
8
+
9
@runtime_checkable
class HooksModule(Protocol):
    """Structural interface a user-supplied hooks module must satisfy.

    Any imported module exposing a compatible module-level ``register``
    callable matches. ``runtime_checkable`` lets ``isinstance`` be used for
    the check, which only verifies the attribute exists — not its signature.
    """

    def register(self, env: Environment, *, target: str, config: CodegenConfig) -> None: ...
12
+
13
+
14
def load_hooks(cfg: CodegenConfig, repo_root: Path) -> HooksModule | None:
    """Import the configured hooks module, making repo_root importable.

    Returns None when no hooks module is configured. Raises RuntimeError
    when the module cannot be imported, ValueError when it does not expose
    the required `register` interface.
    """
    module_name = cfg.hooks.module
    if not module_name:
        return None

    import sys
    root = str(repo_root)
    if root not in sys.path:
        sys.path.insert(0, root)

    try:
        loaded = importlib.import_module(module_name)
    except ImportError as e:
        raise RuntimeError(f"Failed to import hooks module {cfg.hooks.module}: {e}") from e

    if not isinstance(loaded, HooksModule):
        raise ValueError(f"Hooks module {cfg.hooks.module} doesn't implement required interface")
    return loaded
proto/__init__.py ADDED
@@ -0,0 +1,19 @@
1
+ """
2
+ Protobuf handling functionality.
3
+
4
+ Handles proto file discovery, descriptor set building, and proto source resolution.
5
+ """
6
+
7
+ from .discover import discover_proto_files
8
+ from .fds_loader import load_fds, print_fds_content
9
+ from .proto_source import resolve_proto_root, ensure_git_checkout, LATEST_TAG
10
+ from .protoc_runner import build_descriptor_set
11
+
12
+ __all__ = [
13
+ "discover_proto_files",
14
+ "load_fds",
15
+ "print_fds_content",
16
+ "resolve_proto_root",
17
+ "ensure_git_checkout",
18
+ "build_descriptor_set",
19
+ ]
proto/discover.py ADDED
@@ -0,0 +1,70 @@
1
+ from __future__ import annotations
2
+
3
+ from pathlib import Path
4
+ import re
5
+
6
+ from core import CodegenConfig
7
+
8
+
9
# Matches a "{proto_root}" placeholder, tolerating inner whitespace
# (e.g. "{ proto_root }").
_PROTO_ROOT_TOKEN = re.compile(r"\{\s*proto_root\s*\}")

def _resolve_include(proto_root: Path, inc: str) -> list[Path]:
    """
    Resolve one configured include entry into concrete base paths.

    inc can be:
      - relative folder: "google" or "foo/bar"
      - templated: "{proto_root}/google" or "{ proto_root }/google"
      - globby: "google/**" or "{proto_root}/**/v1"
    Returns 1..N base paths (if glob expands). Non-glob results are NOT
    checked for existence here; glob results are filtered to existing paths.
    """
    proto_root = proto_root.resolve()

    # Replace mustache-ish token if present
    if _PROTO_ROOT_TOKEN.search(inc):
        expanded = _PROTO_ROOT_TOKEN.sub(str(proto_root), inc)
        base = Path(expanded)
    else:
        base = proto_root / inc

    # If include contains glob chars, expand it
    # (note: "[" alone triggers glob handling, so regex-like entries would
    # be globbed too)
    s = str(base)
    if any(ch in s for ch in ["*", "?", "["]):
        # glob needs a directory base; easiest is to glob from filesystem root if absolute
        if base.is_absolute():
            # Use Path("/") as root for absolute globs
            # NOTE(review): assumes a POSIX filesystem root — confirm Windows
            # support is not required before relying on this branch.
            matches = [p.resolve() for p in Path("/").glob(s.lstrip("/"))]
        else:
            # Safe: in this branch `base` was built as proto_root / inc, so
            # relative_to(proto_root) cannot raise.
            matches = [p.resolve() for p in proto_root.glob(str(base.relative_to(proto_root)))]
        return [p for p in matches if p.exists()]
    else:
        return [base.resolve()]
40
+
41
+
42
def discover_proto_files(proto_root: Path, config: CodegenConfig) -> list[Path]:
    """Collect .proto files under proto_root per the config.

    Search roots come from config.proto.includes (restricted to directories
    inside proto_root), defaulting to proto_root itself. File patterns from
    config.proto.files are globbed under each root. The result is resolved,
    deduplicated, filtered to real .proto files, and sorted for stability.
    """
    proto_root = proto_root.resolve()

    if config.proto.includes:
        search_roots: list[Path] = []
        for inc in config.proto.includes:
            for candidate in _resolve_include(proto_root, inc):
                # keep only dirs that are inside proto_root (avoid escaping)
                if candidate == proto_root or proto_root in candidate.parents:
                    search_roots.append(candidate)
    else:
        search_roots = [proto_root]

    matched: list[Path] = []
    for base in search_roots:
        if not base.exists():
            continue
        for pattern in config.proto.files:
            matched.extend(base.glob(pattern))  # supports **/*.proto, etc.

    # unique + stable
    result: list[Path] = []
    seen: set[Path] = set()
    for candidate in sorted(matched):
        resolved = candidate.resolve()
        if resolved not in seen and resolved.is_file() and resolved.suffix == ".proto":
            result.append(resolved)
            seen.add(resolved)
    return result
proto/fds_loader.py ADDED
@@ -0,0 +1,51 @@
1
+ from __future__ import annotations
2
+ from pathlib import Path
3
+
4
+ import typer
5
+ from rich.console import Console
6
+ from rich.tree import Tree
7
+ from rich.panel import Panel
8
+
9
+ from google.protobuf import descriptor_pb2
10
+
11
def load_fds(path: Path) -> descriptor_pb2.FileDescriptorSet:
    """Parse a serialized FileDescriptorSet from the given file path."""
    data = path.read_bytes()
    fds = descriptor_pb2.FileDescriptorSet()
    fds.ParseFromString(data)
    return fds
15
+
16
def print_fds_content(fds_path: Path, ctx: typer.Context, console: Console) -> None:
    """Pretty-print the contents of a descriptor set when verbose mode is on.

    Renders a rich tree of files (sorted by name), each with its
    dependencies, messages, enums, and services with method signatures.
    No-op unless ctx.obj.verbose is set.
    """
    if not ctx.obj.verbose:
        return

    # Reuse the shared loader instead of re-parsing the bytes inline.
    fds = load_fds(fds_path)

    tree = Tree(f"[bold]Descriptor Set[/] [dim]{fds_path}[/]")

    files_node = tree.add(f"[cyan]Files[/] ({len(fds.file)})")
    for fd in sorted(fds.file, key=lambda x: x.name):
        file_node = files_node.add(f"[bold]{fd.name}[/] [dim]package={fd.package or '-'}[/]")

        if fd.dependency:
            deps = file_node.add(f"[dim]deps[/] ({len(fd.dependency)})")
            for d in fd.dependency:
                deps.add(d)

        if fd.message_type:
            msgs = file_node.add(f"[green]messages[/] ({len(fd.message_type)})")
            for m in fd.message_type:
                msgs.add(m.name)

        if fd.enum_type:
            enums = file_node.add(f"[magenta]enums[/] ({len(fd.enum_type)})")
            for e in fd.enum_type:
                enums.add(e.name)

        if fd.service:
            svcs = file_node.add(f"[yellow]services[/] ({len(fd.service)})")
            for s in fd.service:
                svc = svcs.add(s.name)
                for meth in s.method:
                    svc.add(f"{meth.name} [dim]{meth.input_type} -> {meth.output_type}[/]")

    console.print(Panel(tree, title="[bold]Descriptor contents[/]", border_style="blue"))
proto/proto_source.py ADDED
@@ -0,0 +1,119 @@
1
+ from __future__ import annotations
2
+ from pathlib import Path
3
+ import subprocess
4
+ import re
5
+
6
+ from core.config import CodegenConfig, ProtoSource
7
+
8
+ LATEST_TAG = "latest-tag"
9
+
10
+
11
+ def _run(cmd: list[str], cwd: Path | None = None) -> str:
12
+ result = subprocess.run(cmd, cwd=str(cwd) if cwd else None, check=True, capture_output=True, text=True)
13
+ return result.stdout
14
+
15
+
16
+ def _parse_semver(tag: str) -> tuple[int, int, int, str] | None:
17
+ """
18
+ Parse a semver-like tag into a comparable tuple.
19
+ Supports formats: v1.2.3, 1.2.3, v1.2.3-beta, etc.
20
+ Returns (major, minor, patch, suffix) or None if not a valid semver.
21
+ """
22
+ # Remove leading 'v' if present
23
+ version = tag.lstrip('v')
24
+
25
+ # Match semver pattern: major.minor.patch with optional suffix
26
+ match = re.match(r'^(\d+)\.(\d+)\.(\d+)(.*)$', version)
27
+ if not match:
28
+ return None
29
+
30
+ major, minor, patch, suffix = match.groups()
31
+ return (int(major), int(minor), int(patch), suffix)
32
+
33
+
34
def _get_latest_semver_tag(dst: Path) -> str:
    """
    Get all tags from the repo and return the latest one by semver.
    Raises RuntimeError if no valid semver tags are found.
    """
    raw = _run(["git", "tag", "--list"], cwd=dst)
    tags = [line.strip() for line in raw.strip().split('\n') if line.strip()]

    if not tags:
        raise RuntimeError("No tags found in repository")

    # Rank the tags that parse as semver. Stable releases (empty suffix)
    # outrank pre-releases of the same (major, minor, patch).
    ranked: list[tuple[tuple, str]] = []
    for tag in tags:
        parsed = _parse_semver(tag)
        if parsed is None:
            continue
        major, minor, patch, suffix = parsed
        ranked.append(((major, minor, patch, suffix == '', suffix), tag))

    if not ranked:
        raise RuntimeError(f"No valid semver tags found. Available tags: {', '.join(tags)}")

    # Highest-ranked tuple wins.
    return max(ranked)[1]
63
+
64
+
65
def ensure_git_checkout(repo_url: str, ref: str, dst: Path) -> str:
    """
    Ensure git repo is cloned and checked out to the specified ref.
    If ref is "latest-tag", resolves to the latest semver tag.
    Returns the actual ref that was checked out.
    """
    dst.parent.mkdir(parents=True, exist_ok=True)
    if not dst.exists():
        _run(["git", "clone", repo_url, str(dst)])
    # Fetch on every call, not only after a fresh clone: an existing cached
    # checkout must see newly pushed tags, otherwise "latest-tag" keeps
    # resolving against a stale tag list.
    _run(["git", "fetch", "--all", "--tags"], cwd=dst)

    actual_ref = ref
    if ref == LATEST_TAG:
        actual_ref = _get_latest_semver_tag(dst)

    _run(["git", "checkout", "--force", actual_ref], cwd=dst)
    return actual_ref
82
+
83
def resolve_proto_root(cfg: CodegenConfig, *, use_local: bool = False) -> Path:
    """
    Resolve the proto root directory.

    With use_local=True (--local flag) the configured proto.root path is
    used and must exist. Otherwise a configured proto.source.git repo is
    cloned/updated into proto.cache_dir; when no git source is configured
    the function falls back to proto.root. Raises RuntimeError when no
    usable root can be resolved.
    """
    if use_local:
        # --local flag: use local proto.root
        if not cfg.proto.root:
            raise RuntimeError("--local flag requires proto.root to be configured")
        local = Path(cfg.proto.root).resolve()
        if not local.exists():
            raise RuntimeError(f"proto.root path does not exist: {cfg.proto.root}")
        return local

    # Default: use git source
    source = cfg.proto.source
    if source and source.git:
        cache_dir = Path(cfg.proto.cache_dir).resolve()
        # Default to latest-tag when no ref is specified
        ensure_git_checkout(source.git, source.ref or LATEST_TAG, cache_dir)
        return cache_dir

    # Fallback to local root if no git source configured
    if cfg.proto.root:
        fallback = Path(cfg.proto.root).resolve()
        if fallback.exists():
            return fallback

    raise RuntimeError("No proto.source.git configured and proto.root not found")
proto/protoc_runner.py ADDED
@@ -0,0 +1,55 @@
1
+ from __future__ import annotations
2
+ from pathlib import Path
3
+ import subprocess
4
+ import tempfile
5
+
6
+ from core import CodegenConfig
7
+
8
def build_descriptor_set(
    config: CodegenConfig,
    proto_root: Path,
    proto_files: list[Path],
    verbose: bool,
) -> tuple[Path, Path | None]:
    """Run protoc to produce a FileDescriptorSet for proto_files.

    Returns (fds_path, out_dir) where out_dir is the temporary directory
    owning the descriptor file, or None when a pre-built descriptor set
    from the config was used instead (caller may clean out_dir up).
    Raises subprocess.CalledProcessError when protoc fails.
    """
    proto_root = proto_root.resolve()

    # A pre-built descriptor set short-circuits protoc entirely.
    if config.proto.source and config.proto.source.fds:
        return Path(config.proto.source.fds), None

    cache_dir = Path(config.proto.cache_dir).resolve() if config.proto.cache_dir else (Path.cwd() / ".cache")
    # BUGFIX: mkdtemp(dir=...) requires the directory to exist; create it up
    # front so a missing cache dir doesn't raise FileNotFoundError.
    cache_dir.mkdir(parents=True, exist_ok=True)
    out_dir = Path(tempfile.mkdtemp(prefix="wizard_protoc_codegen_", dir=cache_dir))
    fds_path = out_dir / "descriptor.pb"

    cmd: list[str] = ["protoc"]

    # Include roots: proto_root + each configured include dir (resolve relative to proto_root)
    include_dirs: list[Path] = [proto_root]
    for inc in (config.proto.includes or []):
        # Mirror the {proto_root} templating used in discovery.
        if "{proto_root}" in inc:
            p = Path(inc.replace("{proto_root}", str(proto_root))).resolve()
        else:
            p = (proto_root / inc).resolve()
        if p.exists():
            include_dirs.append(p)

    # de-dupe include dirs, preserving order
    seen: set[str] = set()
    for d in include_dirs:
        ds = str(d)
        if ds not in seen:
            cmd += ["-I", ds]
            seen.add(ds)

    cmd += ["--include_imports"]
    if config.proto.source and getattr(config.proto.source, "include_info", False):
        cmd += ["--include_source_info"]

    cmd += [f"--descriptor_set_out={fds_path}"]

    # pass proto file paths relative to proto_root
    cmd += [str(p.resolve().relative_to(proto_root)) for p in proto_files]

    if verbose:
        print(" ".join(cmd))

    subprocess.run(cmd, cwd=str(proto_root), check=True)
    return fds_path, out_dir
utils/__init__.py ADDED
@@ -0,0 +1,17 @@
1
+ """
2
+ Utility functions and classes.
3
+
4
+ Contains helper functions for name transformations and other utilities.
5
+ """
6
+
7
+ from .name import Name, to_snake, to_kebab, to_pascal, to_camel, to_macro_snake, to_macro
8
+
9
+ __all__ = [
10
+ "Name",
11
+ "to_snake",
12
+ "to_kebab",
13
+ "to_pascal",
14
+ "to_camel",
15
+ "to_macro",
16
+ "to_macro_snake"
17
+ ]
utils/name.py ADDED
@@ -0,0 +1,69 @@
1
+ from __future__ import annotations
2
+ import re
3
+ from dataclasses import dataclass
4
+
5
+ _WORD_RE = re.compile(r"[A-Z]+(?=[A-Z][a-z]|[0-9]|$)|[A-Z]?[a-z]+|[0-9]+")
6
+
7
+ def _words(s: str) -> list[str]:
8
+ # Handles "MyMessage", "my_message", "my-message", "my message", "HTTPServer2"
9
+ s = s.replace("_", " ").replace("-", " ").strip()
10
+ parts: list[str] = []
11
+ for token in s.split():
12
+ parts.extend(_WORD_RE.findall(token))
13
+ return [p for p in parts if p]
14
+
15
def to_snake(s: str) -> str:
    """Lower-cased words joined by underscores, e.g. "MyName" -> "my_name"."""
    return "_".join(word.lower() for word in _words(s))
18
+
19
def to_kebab(s: str) -> str:
    """Lower-cased words joined by hyphens, e.g. "MyName" -> "my-name"."""
    return "-".join(word.lower() for word in _words(s))
22
+
23
def to_pascal(s: str) -> str:
    """Capitalized words concatenated, e.g. "my_name" -> "MyName"."""
    parts = [word[:1].upper() + word[1:].lower() for word in _words(s)]
    return "".join(parts)
26
+
27
def to_camel(s: str) -> str:
    """First word lower-cased, remaining words capitalized, concatenated."""
    words = _words(s)
    if not words:
        return ""
    tail = [word[:1].upper() + word[1:].lower() for word in words[1:]]
    return words[0].lower() + "".join(tail)
34
+
35
def to_macro(s: str) -> str:
    """Upper-cased words concatenated without separators."""
    return "".join(word.upper() for word in _words(s))
38
+
39
def to_macro_snake(s: str) -> str:
    """Upper-cased words joined by underscores, e.g. "myName" -> "MY_NAME"."""
    return "_".join(word.upper() for word in _words(s))
42
+
43
@dataclass(frozen=True)
class Name:
    """Immutable wrapper around a raw identifier string, exposing common
    case conversions as read-only properties.

    Conversions are recomputed on each access by delegating to the
    module-level to_* helpers, which split `raw` on underscore, hyphen,
    whitespace, and camel-case boundaries.
    """

    # The identifier exactly as it appeared in the source.
    raw: str

    @property
    def snake_case(self) -> str:
        """e.g. "MyName" -> "my_name"."""
        return to_snake(self.raw)

    @property
    def kebab_case(self) -> str:
        """e.g. "MyName" -> "my-name"."""
        return to_kebab(self.raw)

    @property
    def pascal_case(self) -> str:
        """e.g. "my_name" -> "MyName"."""
        return to_pascal(self.raw)

    @property
    def camel_case(self) -> str:
        """e.g. "my_name" -> "myName"."""
        return to_camel(self.raw)

    @property
    def macro_case(self) -> str:
        """e.g. "my_name" -> "MYNAME" (no separators)."""
        return to_macro(self.raw)

    @property
    def macro_snake_case(self) -> str:
        """e.g. "my_name" -> "MY_NAME"."""
        return to_macro_snake(self.raw)