gac-1.5.1-py3-none-any.whl → gac-1.5.2-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Note: this release has been flagged as potentially problematic.

gac/__init__.py CHANGED
@@ -4,12 +4,6 @@ from gac.__version__ import __version__
 from gac.ai import generate_commit_message
 from gac.git import get_staged_files, push_changes
 from gac.prompt import build_prompt, clean_commit_message
-from gac.providers.anthropic import call_anthropic_api as anthropic_generate
-from gac.providers.cerebras import call_cerebras_api as cerebras_generate
-from gac.providers.groq import call_groq_api as groq_generate
-from gac.providers.ollama import call_ollama_api as ollama_generate
-from gac.providers.openai import call_openai_api as openai_generate
-from gac.providers.openrouter import call_openrouter_api as openrouter_generate
 
 __all__ = [
     "__version__",
@@ -18,10 +12,4 @@ __all__ = [
     "clean_commit_message",
     "get_staged_files",
     "push_changes",
-    "anthropic_generate",
-    "cerebras_generate",
-    "groq_generate",
-    "ollama_generate",
-    "openai_generate",
-    "openrouter_generate",
 ]
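The aliased re-exports removed above are still available from `gac.providers` (see the `gac/providers/__init__.py` diff further down), so only the `*_generate` names at the package root disappear. A minimal sketch of imports that should keep working after this change, assuming no other public names were removed:

```python
# Provider callables: import from gac.providers instead of the package root.
from gac.providers import call_openai_api, call_openrouter_api

# High-level helpers remain re-exported from the package root.
from gac import generate_commit_message, get_staged_files
```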
gac/__version__.py CHANGED
@@ -1,3 +1,3 @@
 """Version information for gac package."""
 
-__version__ = "1.5.1"
+__version__ = "1.5.2"
gac/ai.py CHANGED
@@ -18,6 +18,7 @@ from gac.providers import (
     call_ollama_api,
     call_openai_api,
     call_openrouter_api,
+    call_streamlake_api,
     call_zai_api,
     call_zai_coding_api,
 )
@@ -71,6 +72,7 @@ def generate_commit_message(
         "cerebras": call_cerebras_api,
         "ollama": call_ollama_api,
         "openrouter": call_openrouter_api,
+        "streamlake": call_streamlake_api,
         "zai": call_zai_api,
         "zai-coding": call_zai_coding_api,
         "gemini": call_gemini_api,
gac/ai_utils.py CHANGED
@@ -102,6 +102,7 @@ def generate_with_retries(
         "ollama",
         "openai",
         "openrouter",
+        "streamlake",
         "zai",
         "zai-coding",
     ]
gac/init_cli.py CHANGED
@@ -9,6 +9,18 @@ from dotenv import set_key
 GAC_ENV_PATH = Path.home() / ".gac.env"
 
 
+def _prompt_required_text(prompt: str) -> str | None:
+    """Prompt until a non-empty string is provided or the user cancels."""
+    while True:
+        response = questionary.text(prompt).ask()
+        if response is None:
+            return None
+        value = response.strip()
+        if value:
+            return value
+        click.echo("A value is required. Please try again.")
+
+
 @click.command()
 def init() -> None:
     """Interactively set up $HOME/.gac.env for gac."""
@@ -24,10 +36,11 @@ def init() -> None:
         ("Cerebras", "qwen-3-coder-480b"),
         ("Gemini", "gemini-2.5-flash"),
         ("Groq", "meta-llama/llama-4-maverick-17b-128e-instruct"),
-        ("LM Studio", "deepseek-r1-distill-qwen-7b"),
+        ("LM Studio", "gemma3"),
         ("Ollama", "gemma3"),
         ("OpenAI", "gpt-4.1-mini"),
         ("OpenRouter", "openrouter/auto"),
+        ("Streamlake", ""),
         ("Z.AI", "glm-4.5-air"),
         ("Z.AI Coding", "glm-4.6"),
     ]
@@ -37,17 +50,67 @@ def init() -> None:
         click.echo("Provider selection cancelled. Exiting.")
         return
     provider_key = provider.lower().replace(".", "").replace(" ", "-")
-    model_suggestion = dict(providers)[provider]
-    model = questionary.text(f"Enter the model (default: {model_suggestion}):", default=model_suggestion).ask()
-    model_to_save = model.strip() if model.strip() else model_suggestion
+
+    is_ollama = provider_key == "ollama"
+    is_lmstudio = provider_key == "lm-studio"
+    is_streamlake = provider_key == "streamlake"
+    is_zai = provider_key in ("zai", "zai-coding")
+
+    if is_streamlake:
+        endpoint_id = _prompt_required_text("Enter the Streamlake inference endpoint ID (required):")
+        if endpoint_id is None:
+            click.echo("Streamlake configuration cancelled. Exiting.")
+            return
+        model_to_save = endpoint_id
+    else:
+        model_suggestion = dict(providers)[provider]
+        model_prompt = f"Enter the model (default: {model_suggestion}):"
+        model = questionary.text(model_prompt, default=model_suggestion).ask()
+        if model is None:
+            click.echo("Model entry cancelled. Exiting.")
+            return
+        model_to_save = model.strip() if model.strip() else model_suggestion
+
     set_key(str(GAC_ENV_PATH), "GAC_MODEL", f"{provider_key}:{model_to_save}")
     click.echo(f"Set GAC_MODEL={provider_key}:{model_to_save}")
 
-    api_key = questionary.password("Enter your API key (input hidden, can be set later):").ask()
+    if is_ollama:
+        url_default = "http://localhost:11434"
+        url = questionary.text(f"Enter the Ollama API URL (default: {url_default}):", default=url_default).ask()
+        if url is None:
+            click.echo("Ollama URL entry cancelled. Exiting.")
+            return
+        url_to_save = url.strip() if url.strip() else url_default
+        set_key(str(GAC_ENV_PATH), "OLLAMA_API_URL", url_to_save)
+        click.echo(f"Set OLLAMA_API_URL={url_to_save}")
+    elif is_lmstudio:
+        url_default = "http://localhost:1234"
+        url = questionary.text(f"Enter the LM Studio API URL (default: {url_default}):", default=url_default).ask()
+        if url is None:
+            click.echo("LM Studio URL entry cancelled. Exiting.")
+            return
+        url_to_save = url.strip() if url.strip() else url_default
+        set_key(str(GAC_ENV_PATH), "LMSTUDIO_API_URL", url_to_save)
+        click.echo(f"Set LMSTUDIO_API_URL={url_to_save}")
+
+    api_key_prompt = "Enter your API key (input hidden, can be set later):"
+    if is_ollama or is_lmstudio:
+        click.echo(
+            "This provider typically runs locally. API keys are optional unless your instance requires authentication."
+        )
+        api_key_prompt = "Enter your API key (optional, press Enter to skip):"
+
+    api_key = questionary.password(api_key_prompt).ask()
     if api_key:
-        # Z.AI and Z.AI Coding both use the same API key
-        api_key_name = "ZAI_API_KEY" if provider_key in ["zai", "zai-coding"] else f"{provider_key.upper()}_API_KEY"
+        if is_zai:
+            api_key_name = "ZAI_API_KEY"
+        elif is_lmstudio:
+            api_key_name = "LMSTUDIO_API_KEY"
+        else:
+            api_key_name = f"{provider_key.upper()}_API_KEY"
         set_key(str(GAC_ENV_PATH), api_key_name, api_key)
         click.echo(f"Set {api_key_name} (hidden)")
+    elif is_ollama or is_lmstudio:
+        click.echo("Skipping API key. You can add one later if needed.")
 
     click.echo(f"\ngac environment setup complete. You can edit {GAC_ENV_PATH} to update values later.")
gac/main.py CHANGED
@@ -59,7 +59,7 @@ def main(
     if model is None:
         handle_error(
             AIError.model_error(
-                "No model specified. Please set the GAC_MODEL environment variable or use --model."
+                "gac init hasn't been run yet. Please run 'gac init' to set up your configuration, then try again."
             ),
             exit_program=True,
         )
gac/providers/__init__.py CHANGED
@@ -8,6 +8,7 @@ from .lmstudio import call_lmstudio_api
 from .ollama import call_ollama_api
 from .openai import call_openai_api
 from .openrouter import call_openrouter_api
+from .streamlake import call_streamlake_api
 from .zai import call_zai_api, call_zai_coding_api
 
 __all__ = [
@@ -19,6 +20,7 @@ __all__ = [
     "call_ollama_api",
     "call_openai_api",
     "call_openrouter_api",
+    "call_streamlake_api",
     "call_zai_api",
     "call_zai_coding_api",
 ]
gac/providers/streamlake.py ADDED
@@ -0,0 +1,47 @@
+"""StreamLake (Vanchin) API provider for gac."""
+
+import os
+
+import httpx
+
+from gac.errors import AIError
+
+
+def call_streamlake_api(model: str, messages: list[dict], temperature: float, max_tokens: int) -> str:
+    """Call StreamLake (Vanchin) chat completions API."""
+    api_key = os.getenv("STREAMLAKE_API_KEY") or os.getenv("VC_API_KEY")
+    if not api_key:
+        raise AIError.model_error(
+            "STREAMLAKE_API_KEY not found in environment variables (VC_API_KEY alias also not set)"
+        )
+
+    url = "https://vanchin.streamlake.ai/api/gateway/v1/endpoints/chat/completions"
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+
+    data = {
+        "model": model,
+        "messages": messages,
+        "temperature": temperature,
+        "max_tokens": max_tokens,
+    }
+
+    try:
+        response = httpx.post(url, headers=headers, json=data, timeout=120)
+        response.raise_for_status()
+        response_data = response.json()
+        choices = response_data.get("choices")
+        if not choices:
+            raise AIError.model_error("StreamLake API returned no choices")
+
+        message = choices[0].get("message", {})
+        content = message.get("content")
+        if content is None:
+            raise AIError.model_error("StreamLake API returned null content")
+        if content == "":
+            raise AIError.model_error("StreamLake API returned empty content")
+
+        return content
+    except httpx.HTTPStatusError as e:
+        raise AIError.model_error(f"StreamLake API error: {e.response.status_code} - {e.response.text}") from e
+    except Exception as e:  # noqa: BLE001 - convert to AIError
+        raise AIError.model_error(f"Error calling StreamLake API: {str(e)}") from e
gac-1.5.1.dist-info/METADATA → gac-1.5.2.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gac
-Version: 1.5.1
+Version: 1.5.2
 Summary: AI-powered Git commit message generator with multi-provider support
 Project-URL: Homepage, https://github.com/cellwebb/gac
 Project-URL: Documentation, https://github.com/cellwebb/gac#readme
@@ -56,7 +56,7 @@ Description-Content-Type: text/markdown
 
 - **LLM-Powered Commit Messages:** Automatically generates clear, concise, and context-aware commit messages using large language models.
 - **Deep Contextual Analysis:** Understands your code by analyzing staged changes, repository structure, and recent commit history to provide highly relevant suggestions.
-- **Multi-Provider & Model Support:** Flexibly works with leading AI providers (Anthropic, Cerebras, Groq, Ollama, OpenAI, OpenRouter, Z.AI) and models, easily configured through an interactive setup or environment variables.
+- **Multi-Provider & Model Support:** Flexibly works with leading AI providers (Anthropic, Cerebras, Gemini, Groq, OpenAI, OpenRouter, Streamlake/Vanchin, Z.AI) and local providers (LM Studio, Ollama), easily configured through an interactive setup or environment variables.
 - **Seamless Git Workflow:** Integrates smoothly into your existing Git routine as a simple drop-in replacement for `git commit`.
 - **Extensive Customization:** Tailor commit messages to your needs with a rich set of flags, including one-liners (`-o`), AI hints (`-h`), scope inference (`-s`), and specific model selection (`-m`).
 - **Streamlined Workflow Commands:** Boost your productivity with convenient options to stage all changes (`-a`), auto-confirm commits (`-y`), and push to your remote repository (`-p`) in a single step.
@@ -137,9 +137,6 @@ Example `$HOME/.gac.env` output:
 ```env
 GAC_MODEL=anthropic:claude-3-5-haiku-latest
 ANTHROPIC_API_KEY=your_anthropic_key_here
-# Optional: configure OpenRouter
-# GAC_MODEL=openrouter:openrouter/auto
-# OPENROUTER_API_KEY=your_openrouter_key_here
 ```
 
 Alternatively, you can configure `gac` using environment variables or by manually creating/editing the configuration file.
@@ -148,6 +145,9 @@ Alternatively, you can configure `gac` using environment variables or by manuall
 
 You can manage settings in your `$HOME/.gac.env` file using `gac config` commands:
 
+- Streamlake uses inference endpoint IDs instead of model names. When prompted, paste the exact endpoint ID from the Streamlake console.
+- For local providers like Ollama and LM Studio, gac will ask for the base API URL. API keys are optional for these providers unless your instance requires authentication.
+
 - Show config: `gac config show`
 - Set a value: `gac config set GAC_MODEL groq:meta-llama/llama-4-scout-17b-16e-instruct`
 - Get a value: `gac config get GAC_MODEL`
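To point an existing installation at Streamlake without rerunning `gac init`, the documented `gac config set` command can update the model entry; the endpoint ID below is a placeholder, and the matching `STREAMLAKE_API_KEY` can be added to `$HOME/.gac.env` by hand:

```sh
# Placeholder endpoint ID; copy the real one from the Streamlake console.
gac config set GAC_MODEL streamlake:your-endpoint-id-here
```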
gac-1.5.1.dist-info/RECORD → gac-1.5.2.dist-info/RECORD RENAMED
@@ -1,7 +1,7 @@
-gac/__init__.py,sha256=HFWgSVNbTAFhgetCRWI1WrtyE7zC7IHvoBOrfDGUurM,989
-gac/__version__.py,sha256=jqo2PBOQzwZYj1cBEdd7cR5I7BpjI5t9dhuiXIVR5hA,66
-gac/ai.py,sha256=iOxHt1HHmTfut6pQ5Iy0jr0LnxOlaDVKKDVt3_1Yhg0,3323
-gac/ai_utils.py,sha256=8PczY1uq9N9LnvLPjLeMsWEFDfBsZQkPUpmcBxJdifs,7209
+gac/__init__.py,sha256=z9yGInqtycFIT3g1ca24r-A3699hKVaRqGUI79wsmMc,415
+gac/__version__.py,sha256=7cESebHKTY17XWEusC_fCBOxkQ6a0skzAZebJRcVxSo,66
+gac/ai.py,sha256=lbtF-BhkDRg0mucfXhr3-g6FXLUBAmTT-sZ5eBqfT4g,3391
+gac/ai_utils.py,sha256=rn5u6PNM5PbsOHChkjsGhkAi-nZ8VztFDLvmyU1wGGc,7231
 gac/cli.py,sha256=nvz6l-wctfo3SMpC-zqtXyHMg8rtdzxw9cllbVMXJ0w,4872
 gac/config.py,sha256=N62phuLUyVj54eLDiDL6VN8-2_Zt6yB5zsnimFavU3I,1630
 gac/config_cli.py,sha256=v9nFHZO1RvK9fzHyuUS6SG-BCLHMsdOMDwWamBhVVh4,1608
@@ -9,13 +9,13 @@ gac/constants.py,sha256=hGzmLGhVDB2KPIqwtl6tHMNuSwHj-2P1RK0cGm4pyNA,4962
 gac/diff_cli.py,sha256=wnVQ9OFGnM0d2Pj9WVjWbo0jxqIuRHVAwmb8wU9Pa3E,5676
 gac/errors.py,sha256=ysDIVRCd0YQVTOW3Q6YzdolxCdtkoQCAFf3_jrqbjUY,7916
 gac/git.py,sha256=MS2m4fv8h4mau1djFG1aje9NXTmkGsjPO9w18LqNGX0,6031
-gac/init_cli.py,sha256=d2wTJLXF4czVZEL7H38CcveXi6KO96L4aZQn8vqdqks,2203
-gac/main.py,sha256=igiUnkdDG5akjcPHa2iCfqstziYifGmyGegP2k6g_c4,15273
+gac/init_cli.py,sha256=xJDniA516eT_lMnro8zN7fZepUorLjLKDdRe44ssN_o,4628
+gac/main.py,sha256=5yL5bgTzfps3b3Nx9Obie8dyxa0VYlQ3Vot5Uw-dmr4,15289
 gac/preprocess.py,sha256=krrLPHsccYMdn_YAtUrppBJIoRgevxGWusDwhE40LEo,15366
 gac/prompt.py,sha256=K6r9q2cAlyPu1fud6-jJsZ4zeweEo3yt6_WeYv8a_SQ,17087
 gac/security.py,sha256=M1MZm6BLOeKl6rH_-UdXsSKol39FnA5fIP3YP394yZE,9898
 gac/utils.py,sha256=W3ladtmsH01MNLdckQYTzYrYbTGEdzCKI36he9C-y_E,3945
-gac/providers/__init__.py,sha256=3mFyeXIfNWTRYr7QbMv-L5PNBWRzQRfHXdXVM1GJY3s,678
+gac/providers/__init__.py,sha256=W4KbNHlazRFeX1h4ekBKXypCPG0czHuz_Z5AUo0J0js,749
 gac/providers/anthropic.py,sha256=U9gz1Qy7uH1FwG4zSGHSYhVQzL2NFCbkmJM8NmupSkw,1749
 gac/providers/cerebras.py,sha256=XrpgVYzkmQXnK4Jjct_HeXa906m61we45oSiI5l7idw,1363
 gac/providers/gemini.py,sha256=FgXU_ne8G0DobZPkfLTLmr-qexvsSFVee4ZILPjw_RY,2628
@@ -24,9 +24,10 @@ gac/providers/lmstudio.py,sha256=TwAMFmz5b4zMzZb-trAOhHiLUWRZJl9XS9Rv9x9v7jI,186
 gac/providers/ollama.py,sha256=E89fY_jj8TBoRdWniCXcXrm0ZURnL-Gz4vuQI1mpE9U,1823
 gac/providers/openai.py,sha256=p4Mox372Ketm3s4Iik_WXxW6wbTS28CC4G7Mtojfo8U,1349
 gac/providers/openrouter.py,sha256=3ZsxL1n6Xi1Ylu8DH3z4JI9FBM4RvMg-asQ-xPciWtQ,2065
+gac/providers/streamlake.py,sha256=Q-4Lv4DdThOL3BkS0fq8z2QOXNo_RWelovQGBkLPfeo,1758
 gac/providers/zai.py,sha256=J9SogoU-K-Gl3jgyWWJtVqqoUDXMUNodolf7Qrx3CBY,2450
-gac-1.5.1.dist-info/METADATA,sha256=8WnjncUwyO4pllItkiAjjzcIiEfqoytwKEFiGlpcT4A,9518
-gac-1.5.1.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
-gac-1.5.1.dist-info/entry_points.txt,sha256=tdjN-XMmcWfL92swuRAjT62bFLOAwk9bTMRLGP5Z4aI,36
-gac-1.5.1.dist-info/licenses/LICENSE,sha256=vOab37NouL1PNs5BswnPayrMCqaN2sqLfMQfqPDrpZg,1103
-gac-1.5.1.dist-info/RECORD,,
+gac-1.5.2.dist-info/METADATA,sha256=wc8g_1kHoY1ylPPHvge2diTEkOC2-1hiwa4sJ6-qpyY,9759
+gac-1.5.2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+gac-1.5.2.dist-info/entry_points.txt,sha256=tdjN-XMmcWfL92swuRAjT62bFLOAwk9bTMRLGP5Z4aI,36
+gac-1.5.2.dist-info/licenses/LICENSE,sha256=vOab37NouL1PNs5BswnPayrMCqaN2sqLfMQfqPDrpZg,1103
+gac-1.5.2.dist-info/RECORD,,