prgen-cli 0.2.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
prgen/config.py ADDED
@@ -0,0 +1,203 @@
1
+ """User config: JSON file under XDG config dir."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import json
6
+ import os
7
+ from pathlib import Path
8
+ from typing import Literal, cast, get_args
9
+
10
+ from prgen.defaults import ModelTier, ProviderChoice
11
+
12
# Secrets merged into os.environ via load_prgen_env (setdefault only).
SECRET_KEYS = frozenset(
    {
        "OPENAI_API_KEY",
        "GOOGLE_API_KEY",
    }
)

# Optional CLI defaults (same JSON file).
SETTINGS_KEYS = frozenset({"base", "provider", "tier"})

# Every key that may legally appear in config.json (secrets + settings).
PERSISTED_KEYS = SECRET_KEYS | SETTINGS_KEYS

# Static Literal companion to PERSISTED_KEYS for type checkers; the assert
# keeps the runtime set and the Literal in sync when either is edited.
PersistedConfigKey = Literal["OPENAI_API_KEY", "GOOGLE_API_KEY", "base", "provider", "tier"]
assert frozenset(get_args(PersistedConfigKey)) == PERSISTED_KEYS

# File name inside config_dir() that holds all persisted keys.
CONFIG_FILENAME = "config.json"
29
+
30
+
31
def config_dir() -> Path:
    """Return the prgen config directory.

    Uses $XDG_CONFIG_HOME/prgen when the variable is set and non-empty,
    otherwise falls back to ~/.config/prgen.
    """
    xdg_root = os.environ.get("XDG_CONFIG_HOME")
    base = Path(xdg_root) if xdg_root else Path.home() / ".config"
    return base / "prgen"
37
+
38
+
39
def user_config_path() -> Path:
    """Per-user config file (JSON) under config_dir().

    Note: this is per-user, not machine-wide — it lives under
    $XDG_CONFIG_HOME or ~/.config, which are user-scoped locations.
    """
    return config_dir() / CONFIG_FILENAME
42
+
43
+
44
+ def _atomic_write_json(path: Path, data: dict[str, str]) -> None:
45
+ path.parent.mkdir(parents=True, exist_ok=True)
46
+ text = json.dumps(data, indent=2, sort_keys=True) + "\n"
47
+ tmp = path.with_name(path.name + ".tmp")
48
+ tmp.write_text(text, encoding="utf-8")
49
+ tmp.replace(path)
50
+ try:
51
+ path.chmod(0o600)
52
+ except OSError:
53
+ pass
54
+
55
+
56
+ def _normalize_raw_dict(raw: dict) -> dict[str, str]:
57
+ """Coerce JSON object to string values; skip non-scalars."""
58
+ out: dict[str, str] = {}
59
+ for k, v in raw.items():
60
+ if not isinstance(k, str):
61
+ continue
62
+ if v is None:
63
+ continue
64
+ if isinstance(v, str):
65
+ out[k] = v
66
+ elif isinstance(v, bool):
67
+ out[k] = str(v).lower()
68
+ elif isinstance(v, int | float):
69
+ out[k] = str(v)
70
+ return out
71
+
72
+
73
+ def validate_persisted_config(data: dict[str, str], *, path_hint: Path | None = None) -> None:
74
+ """Raise ValueError if base/provider/tier are present but invalid."""
75
+ loc = f" in {path_hint}" if path_hint else ""
76
+ if "base" in data and not data["base"].strip():
77
+ raise ValueError(f"config key 'base' must be non-empty{loc}")
78
+ if "provider" in data:
79
+ p = data["provider"]
80
+ if p not in ("auto", "openai", "gemini"):
81
+ raise ValueError(
82
+ f"config key 'provider' must be auto, openai, or gemini{loc}; got {p!r}"
83
+ )
84
+ if "tier" in data:
85
+ t = data["tier"]
86
+ if t not in ("default", "pro"):
87
+ raise ValueError(f"config key 'tier' must be default or pro{loc}; got {t!r}")
88
+
89
+
90
def _read_raw_config() -> dict[str, str]:
    """Load, normalize, and validate config.json; {} when the file is absent."""
    path = user_config_path()
    if not path.is_file():
        return {}

    try:
        parsed = json.loads(path.read_text(encoding="utf-8"))
    except json.JSONDecodeError as exc:
        raise ValueError(
            f"Invalid JSON in {path}: {exc}. Fix or remove the file and run prgen config."
        ) from exc

    if not isinstance(parsed, dict):
        raise ValueError(f"{path} must contain a JSON object.")

    normalized = _normalize_raw_dict(parsed)
    validate_persisted_config(normalized, path_hint=path)
    return normalized
105
+
106
+
107
def read_persisted_config() -> dict[str, str]:
    """Public accessor for the validated contents of config.json.

    Returns an empty dict when the file does not exist.
    """
    return _read_raw_config()
110
+
111
+
112
def load_prgen_env() -> None:
    """Expose persisted API keys through os.environ.

    Uses setdefault, so keys already present in the environment always win
    over values stored in config.json.
    """
    persisted = _read_raw_config()
    for name in SECRET_KEYS & persisted.keys():
        os.environ.setdefault(name, persisted[name])
118
+
119
+
120
# Fixed row order for `prgen config show`-style output: secrets first,
# then the optional CLI defaults.
DISPLAY_KEY_ORDER: tuple[str, ...] = (
    "OPENAI_API_KEY",
    "GOOGLE_API_KEY",
    "base",
    "provider",
    "tier",
)
127
+
128
+
129
def public_config_rows() -> list[tuple[str, str, bool]]:
    """Safe rows for printing: (key, display_value, is_secret).

    Secrets are never echoed; their display value is "set" or "unset".
    """
    data = _read_raw_config()
    rows: list[tuple[str, str, bool]] = []
    for key in DISPLAY_KEY_ORDER:
        secret = key in SECRET_KEYS
        if secret:
            # Only report presence, never the secret itself.
            stored = data.get(key, "")
            display = "set" if (isinstance(stored, str) and stored.strip()) else "unset"
        else:
            stored = data.get(key)
            trimmed = str(stored).strip() if stored is not None else ""
            display = trimmed if trimmed else "unset"
        rows.append((key, display, secret))
    return rows
149
+
150
+
151
def read_cli_defaults_from_config() -> tuple[str | None, ProviderChoice | None, ModelTier | None]:
    """Optional base, provider, tier from config.json (CLI uses when flags are omitted).

    Each element is None when the corresponding key is absent (or, for
    `base`, blank after stripping).
    """
    data = _read_raw_config()

    stripped_base = data.get("base", "").strip()
    base: str | None = stripped_base or None

    provider: ProviderChoice | None = (
        cast(ProviderChoice, data["provider"]) if "provider" in data else None
    )
    tier: ModelTier | None = cast(ModelTier, data["tier"]) if "tier" in data else None

    return base, provider, tier
166
+
167
+
168
def set_user_key(key: str, value: str) -> Path:
    """Persist KEY in config.json, creating the file if needed; returns its path.

    Raises ValueError for keys outside PERSISTED_KEYS or invalid values.
    """
    name = key.strip()
    if name not in PERSISTED_KEYS:
        raise ValueError(f"Unsupported key {name!r}. Supported: {', '.join(sorted(PERSISTED_KEYS))}")

    path = user_config_path()
    merged = _read_raw_config()
    merged[name] = value.strip()

    # Drop any stray keys so the file only ever contains supported entries.
    persistable = {entry: val for entry, val in merged.items() if entry in PERSISTED_KEYS}
    validate_persisted_config(persistable, path_hint=path)
    _atomic_write_json(path, persistable)
    return path
180
+
181
+
182
def unset_user_key(key: str) -> Path:
    """Remove KEY from config.json; delete the file when it becomes empty.

    Raises ValueError for unsupported keys, a missing file, or a key that
    is not present. Returns the config file path either way.
    """
    name = key.strip()
    if name not in PERSISTED_KEYS:
        raise ValueError(f"Unsupported key {name!r}. Supported: {', '.join(sorted(PERSISTED_KEYS))}")

    path = user_config_path()
    if not path.is_file():
        raise ValueError(f"No config file: {path}")

    data = _read_raw_config()
    if name not in data:
        raise ValueError(f"Key {name!r} not found in {path}")
    data.pop(name)

    remaining = {entry: val for entry, val in data.items() if entry in PERSISTED_KEYS}
    if remaining:
        validate_persisted_config(remaining, path_hint=path)
        _atomic_write_json(path, remaining)
        return path

    # Last key removed: delete the now-empty file (best effort).
    try:
        path.unlink()
    except OSError:
        pass
    return path
prgen/defaults.py ADDED
@@ -0,0 +1,24 @@
1
+ """Model presets per provider: default (efficient) vs pro (strongest)."""
2
+
3
+ from __future__ import annotations
4
+
5
+ from typing import Literal
6
+
7
# Closed vocabularies for the --tier and --provider CLI options.
ModelTier = Literal["default", "pro"]
ProviderChoice = Literal["auto", "openai", "gemini"]

# Tier -> model id presets for the OpenAI backend.
OPENAI_MODELS: dict[ModelTier, str] = {
    "default": "gpt-5.4-mini",
    "pro": "gpt-5.4",
}

# Tier -> model id presets for the Gemini backend.
# Default: gemini-3-flash-preview (standard Flash). For lower cost/latency use
# gemini-3.1-flash-lite-preview instead.
GEMINI_MODELS: dict[ModelTier, str] = {
    "default": "gemini-3-flash-preview",
    "pro": "gemini-3.1-pro-preview",
}
21
+
22
+
23
def model_for_tier(backend: Literal["openai", "gemini"], tier: ModelTier) -> str:
    """Return the preset model id for `backend` at the given `tier`."""
    presets = OPENAI_MODELS if backend == "openai" else GEMINI_MODELS
    return presets[tier]
prgen/git_utils.py ADDED
@@ -0,0 +1,74 @@
1
+ from __future__ import annotations
2
+
3
+ import shutil
4
+ import subprocess
5
+ from pathlib import Path
6
+
7
+
8
class GitRefError(ValueError):
    """Raised when a git ref cannot be resolved (e.g. an unknown --base ref)."""
10
+
11
+
12
class GitNotFoundError(RuntimeError):
    """Raised when the git executable is not available on PATH."""
14
+
15
+
16
def ensure_git_available() -> None:
    """Raise GitNotFoundError if `git` is not installed or not on PATH."""
    if shutil.which("git") is not None:
        return
    raise GitNotFoundError(
        "git is not installed or not on your PATH. Install Git and ensure `git` is available."
    )
22
+
23
+
24
def run_git_command(args: list[str], *, cwd: Path | None = None) -> str:
    """Run `git <args>` and return stripped stdout.

    Raises subprocess.CalledProcessError on a non-zero exit (check=True).
    """
    command = ["git", *args]
    options: dict = {"capture_output": True, "text": True, "check": True}
    if cwd is not None:
        options["cwd"] = cwd
    return subprocess.run(command, **options).stdout.strip()
30
+
31
+
32
def verify_base_ref(base: str, *, cwd: Path | None = None) -> None:
    """Ensure `base` resolves so `git diff base...HEAD` and `git log base..HEAD` work."""
    probe = subprocess.run(
        ["git", "rev-parse", "-q", "--verify", base],
        cwd=cwd,
        capture_output=True,
        text=True,
    )
    if probe.returncode == 0:
        return
    raise GitRefError(
        f"Cannot resolve --base {base!r}. "
        "If you meant the remote default branch, try: git fetch origin "
        "or pass an existing local branch, e.g. --base main"
    )
46
+
47
+
48
def git_repo_root(path: Path) -> Path:
    """Resolve `path` and return its git worktree root.

    Raises ValueError (with git's own message when available) if the path
    is not inside a git working tree.
    """
    resolved = path.resolve()
    probe = subprocess.run(
        ["git", "-C", str(resolved), "rev-parse", "--show-toplevel"],
        capture_output=True,
        text=True,
    )
    if probe.returncode != 0:
        detail = (probe.stderr or probe.stdout or "not a git repository").strip()
        raise ValueError(f"{resolved}: {detail}")
    return Path(probe.stdout.strip())
60
+
61
+
62
def get_diff(base: str = "origin/main", *, cwd: Path | None = None) -> str:
    """Diff of HEAD against the merge base with `base` (three-dot range)."""
    range_spec = f"{base}...HEAD"
    return run_git_command(["diff", range_spec], cwd=cwd)
64
+
65
+
66
def get_commits(base: str = "origin/main", *, cwd: Path | None = None) -> str:
    """Commit messages on HEAD since `base`, wrapped in parse-friendly delimiters."""
    # Each commit is emitted as: <commit-message ISO-date>, subject, blank
    # line, body, then an explicit end marker.
    log_format = "--format=<commit-message %cI>%n%s%n%n%b%n<end-of-commit-message>"
    return run_git_command(["log", log_format, f"{base}..HEAD"], cwd=cwd)
prgen/prompting.py ADDED
@@ -0,0 +1,23 @@
1
+ from importlib.resources import files
2
+
3
+
4
def load_prompt_template() -> str:
    """Read the bundled PR-description prompt template from package data."""
    resource = files("prgen") / "prompts" / "pr_description.txt"
    return resource.read_text(encoding="utf-8")
11
+
12
+
13
def build_prompt(
    template: str,
    diff: str,
    commits: str,
    additional_context: str,
) -> str:
    """Fill the template's placeholders, substituting fallbacks for empty inputs."""
    substitutions = {
        "{{DIFF}}": diff or "No diff provided.",
        "{{COMMITS}}": commits or "No commits provided.",
        "{{ADDITIONAL_CONTEXT}}": additional_context or "none",
    }
    filled = template
    for placeholder, replacement in substitutions.items():
        filled = filled.replace(placeholder, replacement)
    return filled
@@ -0,0 +1,64 @@
1
+ You are an advanced AI programming assistant and are tasked with summarizing code changes into a concise but meaningful pull request title and description.
2
+
3
+ You will be provided with:
4
+ - a code diff
5
+ - a list of commits
6
+ - optional additional context
7
+
8
+ Your goal is to analyze the changes and create a clear, informative title and description that accurately represents the modifications made to the code.
9
+
10
+ First, examine the following code changes provided in Git diff format:
11
+
12
+ <~~diff~~>
13
+ {{DIFF}}
14
+ </~~diff~~>
15
+
16
+ Then, review the list of commits to help understand the motivation behind the changes and any relevant background information:
17
+
18
+ <~~data~~>
19
+ {{COMMITS}}
20
+ </~~data~~>
21
+
22
+ Now, if provided, use this context to understand the motivation behind the changes and any relevant background information:
23
+
24
+ <~~additional-context~~>
25
+ {{ADDITIONAL_CONTEXT}}
26
+ </~~additional-context~~>
27
+
28
+ To create an effective pull request title and description, follow these steps:
29
+
30
+ 1. Carefully analyze the diff, commit messages, and context, focusing on:
31
+ - The purpose and rationale of the changes
32
+ - Any problems addressed or benefits introduced
33
+ - Any significant logic changes or algorithmic improvements
34
+
35
+ 2. Ensure the following when composing the pull request title and description:
36
+ - Emphasize the "why" of the change, its benefits, or the problem it addresses
37
+ - Use an informal yet professional tone
38
+ - Use a future-oriented manner, third-person singular present tense
39
+ - Be clear and concise
40
+ - Synthesize only meaningful information from the diff and context
41
+ - Avoid outputting code, specific code identifiers, names, or file names unless crucial for understanding
42
+ - Avoid repeating information, broad generalities, and unnecessary phrases
43
+
44
+ 3. Summarize the main purpose of the changes in a single, concise sentence:
45
+ - Start with a third-person singular present tense verb
46
+ - Limit to 50 characters if possible
47
+
48
+ 4. Provide a detailed explanation of the changes:
49
+ - Add line breaks for readability
50
+ - Focus on the "why" rather than only the "what"
51
+ - Structure the body with markdown bullets and headings
52
+
53
+ 5. If relevant, include any issue reference at the end
54
+
55
+ Write your title inside <summary> tags and your description inside <body> tags and include no other text.
56
+
57
+ Output structure:
58
+
59
+ <summary>
60
+ [pull-request-title]
61
+ </summary>
62
+ <body>
63
+ [pull-request-body]
64
+ </body>
@@ -0,0 +1,29 @@
1
+ from dataclasses import dataclass
2
+
3
+ from prgen.defaults import GEMINI_MODELS
4
+
5
+
6
@dataclass
class GeminiProvider:
    """LLM provider backed by Google's Gemini API."""

    # API key for Google AI Studio / Gemini.
    api_key: str
    model: str = GEMINI_MODELS["default"]

    def generate(self, prompt: str) -> str:
        """Send `prompt` to the configured model and return the stripped response text."""
        # Imported lazily so the CLI works without this optional SDK installed.
        try:
            from google import genai
            from google.genai import types
        except ImportError as exc:
            raise ImportError(
                "The Google GenAI SDK is not installed. Install with: pip install google-genai"
            ) from exc

        generation_config = types.GenerateContentConfig(
            temperature=0.2,
            max_output_tokens=8192,
        )
        client = genai.Client(api_key=self.api_key)
        response = client.models.generate_content(
            model=self.model,
            contents=prompt,
            config=generation_config,
        )
        # response.text may be None (e.g. blocked/empty response).
        return (response.text or "").strip()
@@ -0,0 +1,29 @@
1
+ from dataclasses import dataclass
2
+
3
+ from prgen.defaults import OPENAI_MODELS
4
+
5
+
6
@dataclass
class OpenAIProvider:
    """LLM provider backed by the OpenAI Responses API."""

    # OpenAI API key used to authenticate the client.
    api_key: str
    model: str = OPENAI_MODELS["default"]

    def generate(self, prompt: str) -> str:
        """Send `prompt` to the configured model and return its stripped text output."""
        # Imported lazily so the CLI works without this optional SDK installed.
        try:
            from openai import OpenAI
        except ImportError as exc:
            raise ImportError(
                "The OpenAI SDK is not installed. Install with: pip install openai"
            ) from exc

        request_kwargs = {
            "model": self.model,
            "input": prompt,
            "temperature": 0.2,
            "max_output_tokens": 8192,
            # Do not retain the request/response on OpenAI's side.
            "store": False,
        }
        client = OpenAI(api_key=self.api_key)
        response = client.responses.create(**request_kwargs)
        return response.output_text.strip()
prgen/ui.py ADDED
@@ -0,0 +1,87 @@
1
+ """Terminal output and loading indicator (Rich)."""
2
+
3
+ from __future__ import annotations
4
+
5
+ import sys
6
+ from collections.abc import Iterator
7
+ from contextlib import contextmanager
8
+
9
+ from rich.console import Console
10
+ from rich.text import Text
11
+
12
+ from prgen.about import AUTHOR, REPO_URL, package_version
13
+
14
+
15
def _stderr_console() -> Console:
    """Console bound to stderr (spinner/status output); highlighting off."""
    return Console(stderr=True, highlight=False)
17
+
18
+
19
def _stdout_console() -> Console:
    """Console bound to stdout for the generated PR text.

    highlight=False: don't treat diff-like output as Python/etc.
    soft_wrap=True: keep long lines intact for copy/paste.
    """
    return Console(highlight=False, soft_wrap=True)
22
+
23
+
24
def _print_generation_credit(console: Console) -> None:
    """Footer after model output: tool version + author (distinct from PR body for copy/paste)."""
    version = package_version()
    credit_line = (
        "[dim]—[/dim] [dim]Generated with[/dim] [bold cyan]prgen[/bold cyan] "
        f"[dim]v{version}[/dim] [dim]·[/dim] [dim]{AUTHOR}[/dim]"
    )
    console.print(credit_line)
    console.print(f"[dim]{REPO_URL}[/dim]")
32
+
33
+
34
@contextmanager
def loading(message: str = "Generating PR summary", *, model: str | None = None) -> Iterator[None]:
    """Show a Rich spinner on stderr while the block runs (no-op if stderr is not a TTY)."""
    if not sys.stderr.isatty():
        # Non-interactive stderr (piped/redirected): run the block silently.
        yield
        return

    parts = [f"[bold cyan]{message}[/bold cyan]"]
    if model:
        parts.append(f"[dim]{model}[/dim]")
    status_label = " ".join(parts)

    with _stderr_console().status(
        status_label,
        spinner="dots2",
        spinner_style="cyan",
        speed=0.9,
    ):
        yield
53
+
54
+
55
def print_pr_summary(
    title: str | None,
    body: str | None,
    raw_fallback: str,
) -> None:
    """
    Print title/body with styled headers; body is plain text (copy/paste friendly, no box borders).
    """
    console = _stdout_console()

    if title is None or body is None:
        # The model output lacked the expected tags: show it raw, if any.
        fallback = raw_fallback.rstrip()
        if not fallback:
            return
        console.print()
        console.print("[bold yellow]Model output[/bold yellow] [dim](no <summary>/<body> tags)[/dim]")
        console.print(Text(fallback))
        console.print()
        _print_generation_credit(console)
        console.print()
        return

    console.print()
    console.print("[bold green]PR title[/bold green]")
    # Text: no Rich markup parsing in model output (brackets, markdown).
    console.print(Text(title.strip()))
    console.print()
    console.print("[bold blue]PR description[/bold blue]")
    console.print(Text(body.strip()))
    console.print()
    _print_generation_credit(console)
    console.print()
@@ -0,0 +1,151 @@
1
+ Metadata-Version: 2.4
2
+ Name: prgen-cli
3
+ Version: 0.2.1
4
+ Summary: Generate PR titles and descriptions from git diff
5
+ Author: Jean Paul Fernandez
6
+ License-Expression: GPL-3.0-only
7
+ Project-URL: Homepage, https://github.com/jpxoi/prgen
8
+ Project-URL: Repository, https://github.com/jpxoi/prgen
9
+ Project-URL: Issues, https://github.com/jpxoi/prgen/issues
10
+ Keywords: cli,git,pull-request,diff,llm,openai,gemini
11
+ Classifier: Development Status :: 4 - Beta
12
+ Classifier: Environment :: Console
13
+ Classifier: Intended Audience :: Developers
14
+ Classifier: Programming Language :: Python :: 3
15
+ Classifier: Programming Language :: Python :: 3 :: Only
16
+ Classifier: Topic :: Software Development :: Version Control
17
+ Requires-Python: >=3.10
18
+ Description-Content-Type: text/markdown
19
+ License-File: LICENSE
20
+ Requires-Dist: google-genai>=1.70.0
21
+ Requires-Dist: openai>=2.30.0
22
+ Requires-Dist: rich>=14.3.3
23
+ Requires-Dist: typer>=0.24.1
24
+ Dynamic: license-file
25
+
26
+ # prgen
27
+
28
+ Generate a pull request title and description from `git diff` and commit history.
29
+
30
+ **Author:** Jean Paul Fernandez · [github.com/jpxoi/prgen](https://github.com/jpxoi/prgen)
31
+
32
+ Licensed under the [GNU General Public License v3.0](LICENSE) (GPL-3.0-only): you may share and modify this program under those terms; see the license file for full conditions.
33
+
34
+ ## Requirements
35
+
36
+ - **Python** 3.10+
37
+ - **`git`** on your `PATH` (install Git and ensure `git` works in a terminal)
38
+
39
+ ## Install
40
+
41
+ The PyPI package is named **`prgen-cli`**; installing it provides the **`prgen`** command that you run in the terminal.
42
+
43
+ From [PyPI](https://pypi.org/project/prgen-cli/):
44
+
45
+ ```bash
46
+ pip install prgen-cli
47
+ # or
48
+ uv tool install prgen-cli
49
+ ```
50
+
51
+ From a clone (installs all dependencies):
52
+
53
+ ```bash
54
+ uv sync
55
+ ```
56
+
57
+ Formatting / lint (see `[tool.ruff]` in `pyproject.toml`):
58
+
59
+ ```bash
60
+ uv run ruff format .
61
+ uv run ruff check .
62
+ ```
63
+
64
+ ## Setup
65
+
66
+ Store settings in **`~/.config/prgen/config.json`** using **`prgen config`** (interactive prompts, or `prgen config set …`). Supported fields:
67
+
68
+ - `OPENAI_API_KEY` — OpenAI API key (also merged into the environment when you run `prgen`, if not already set)
69
+ - `GOOGLE_API_KEY` — Google AI Studio / Gemini API key (same behavior)
70
+ - `base` — optional default for **`--base`** (see [Defaults](#defaults) below)
71
+ - `provider` — optional default for **`--provider`**
72
+ - `tier` — optional default for **`--tier`**
73
+
74
+ Override the config directory with **`XDG_CONFIG_HOME`** (file is `$XDG_CONFIG_HOME/prgen/config.json`).
75
+
76
+ ## Defaults
77
+
78
+ Defaults apply to the **main** `prgen` command (generating a PR summary), not to `prgen config …` subcommands.
79
+
80
+ ### Built-in CLI defaults (no config file)
81
+
82
+ If you do not pass a flag and the key is **not** set in `config.json`, prgen uses:
83
+
84
+ | What | Default | Meaning |
85
+ | ---: | :---: | :--- |
86
+ | Git repo | current directory | Diff and log are for the repo you run from. Use **`-C` / `--repo`** for another tree. |
87
+ | **`--base`** | `origin/main` | Compare `HEAD` to this ref (`git diff` / `git log` vs merge base). The ref must exist locally (e.g. run **`git fetch origin`** or use a local branch like `main`). |
88
+ | **`--provider`** | `auto` | Pick a backend from your API keys: **Gemini** if `GOOGLE_API_KEY` is set, otherwise **OpenAI** if `OPENAI_API_KEY` is set. If neither is set, prgen exits with an error until you configure keys. |
89
+ | **`--tier`** | `default` | Cheaper/faster model preset. **`pro`** selects a stronger model for the chosen provider. |
90
+ | **`--model`** | *(unset)* | Use the model for the current **`--tier`** (see presets in the source under `defaults.py`). **`--model ID`** overrides **`--tier`** entirely. |
91
+ | **`--context`** | `none` | Extra text merged into the prompt; the literal string **`none`** means “no extra context” unless you change it. |
92
+
93
+ ### Optional overrides from `config.json`
94
+
95
+ If **`base`**, **`provider`**, or **`tier`** appear in `config.json`, they **replace** the built-in defaults **only when you do not pass the matching flag**.
96
+
97
+ - Example: `base` in JSON is `main`, and you run **`prgen`** with no **`--base`** → effective base is **`main`**.
98
+ - Example: same config, but you run **`prgen --base origin/main`** → **`--base`** wins; the file is ignored for that run.
99
+
100
+ **`--model`** and **`--context`** are not read from the JSON file; only **`base`**, **`provider`**, and **`tier`** are.
101
+
102
+ ### Summary
103
+
104
+ - **Config file** fills in **`--base`**, **`--provider`**, and **`--tier`** when those flags are omitted.
105
+ - **Explicit flags** always override the file.
106
+ - **`auto`** chooses Gemini vs OpenAI from which API keys are available (see **`--provider`** above).
107
+
108
+ ### Set keys from the CLI
109
+
110
+ Writes go to **`~/.config/prgen/config.json`** (see `prgen config path`). Allowed keys: **`OPENAI_API_KEY`**, **`GOOGLE_API_KEY`**, **`base`**, **`provider`**, **`tier`**.
111
+
112
+ ```bash
113
+ prgen config # wizard: API keys + base, provider, tier (Rich prompts)
114
+ prgen config set OPENAI_API_KEY sk-...
115
+ prgen config set GOOGLE_API_KEY - < key.txt # value read from stdin
116
+ prgen config unset OPENAI_API_KEY
117
+ prgen config show # safe summary (keys as set/unset; colors if TTY)
118
+ prgen config path # print path to the file
119
+ ```
120
+
121
+ ## Run on another project
122
+
123
+ Use **`-C`** (same idea as `git -C`) so you can run `prgen` from anywhere:
124
+
125
+ ```bash
126
+ prgen -C ~/src/my-application
127
+ ```
128
+
129
+ If `prgen` is not on your `PATH`, invoke it via `uv` from the prgen repo:
130
+
131
+ ```bash
132
+ uv run --directory /path/to/prgen prgen -C ~/src/my-application
133
+ ```
134
+
135
+ ## Install globally (optional)
136
+
137
+ From the prgen project directory:
138
+
139
+ ```bash
140
+ uv tool install .
141
+ # or
142
+ pipx install .
143
+ ```
144
+
145
+ Then `prgen` is available in new shells (ensure your tool install `bin` directory is on `PATH`).
146
+
147
+ ## Options
148
+
149
+ See **`prgen --help`** for all flags. In addition to [Defaults](#defaults):
150
+
151
+ - If there is **no diff and no commits** compared to **`--base`**, prgen exits with an error (nothing to summarize).