gac 1.2.4__tar.gz → 1.2.6__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of gac might be problematic.

Files changed (29)
  1. {gac-1.2.4 → gac-1.2.6}/PKG-INFO +2 -2
  2. {gac-1.2.4 → gac-1.2.6}/README.md +1 -1
  3. {gac-1.2.4 → gac-1.2.6}/src/gac/__version__.py +1 -1
  4. {gac-1.2.4 → gac-1.2.6}/src/gac/ai.py +2 -0
  5. {gac-1.2.4 → gac-1.2.6}/src/gac/init_cli.py +3 -2
  6. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/__init__.py +2 -0
  7. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/groq.py +3 -2
  8. gac-1.2.6/src/gac/providers/zai.py +43 -0
  9. {gac-1.2.4 → gac-1.2.6}/.gitignore +0 -0
  10. {gac-1.2.4 → gac-1.2.6}/LICENSE +0 -0
  11. {gac-1.2.4 → gac-1.2.6}/pyproject.toml +0 -0
  12. {gac-1.2.4 → gac-1.2.6}/src/gac/__init__.py +0 -0
  13. {gac-1.2.4 → gac-1.2.6}/src/gac/ai_utils.py +0 -0
  14. {gac-1.2.4 → gac-1.2.6}/src/gac/cli.py +0 -0
  15. {gac-1.2.4 → gac-1.2.6}/src/gac/config.py +0 -0
  16. {gac-1.2.4 → gac-1.2.6}/src/gac/config_cli.py +0 -0
  17. {gac-1.2.4 → gac-1.2.6}/src/gac/constants.py +0 -0
  18. {gac-1.2.4 → gac-1.2.6}/src/gac/diff_cli.py +0 -0
  19. {gac-1.2.4 → gac-1.2.6}/src/gac/errors.py +0 -0
  20. {gac-1.2.4 → gac-1.2.6}/src/gac/git.py +0 -0
  21. {gac-1.2.4 → gac-1.2.6}/src/gac/main.py +0 -0
  22. {gac-1.2.4 → gac-1.2.6}/src/gac/preprocess.py +0 -0
  23. {gac-1.2.4 → gac-1.2.6}/src/gac/prompt.py +0 -0
  24. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/anthropic.py +0 -0
  25. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/cerebras.py +0 -0
  26. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/ollama.py +0 -0
  27. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/openai.py +0 -0
  28. {gac-1.2.4 → gac-1.2.6}/src/gac/providers/openrouter.py +0 -0
  29. {gac-1.2.4 → gac-1.2.6}/src/gac/utils.py +0 -0
{gac-1.2.4 → gac-1.2.6}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: gac
-Version: 1.2.4
+Version: 1.2.6
 Summary: AI-powered Git commit message generator with multi-provider support
 Project-URL: Homepage, https://github.com/cellwebb/gac
 Project-URL: Documentation, https://github.com/cellwebb/gac#readme
@@ -56,7 +56,7 @@ Description-Content-Type: text/markdown
 
 - **LLM-Powered Commit Messages:** Automatically generates clear, concise, and context-aware commit messages using large language models.
 - **Deep Contextual Analysis:** Understands your code by analyzing staged changes, repository structure, and recent commit history to provide highly relevant suggestions.
-- **Multi-Provider & Model Support:** Flexibly works with various leading AI providers (like Anthropic, Cerebras, Groq, OpenRouter, OpenAI) and models, easily configured through an interactive setup or environment variables.
+- **Multi-Provider & Model Support:** Flexibly works with various leading AI providers (like Anthropic, Cerebras, Groq, OpenRouter, OpenAI, Z.AI) and models, easily configured through an interactive setup or environment variables.
 - **Seamless Git Workflow:** Integrates smoothly into your existing Git routine as a simple drop-in replacement for `git commit`.
 - **Extensive Customization:** Tailor commit messages to your needs with a rich set of flags, including one-liners (`-o`), AI hints (`-h`), scope inference (`-s`), and specific model selection (`-m`).
 - **Streamlined Workflow Commands:** Boost your productivity with convenient options to stage all changes (`-a`), auto-confirm commits (`-y`), and push to your remote repository (`-p`) in a single step.
{gac-1.2.4 → gac-1.2.6}/README.md
@@ -14,7 +14,7 @@
 
 - **LLM-Powered Commit Messages:** Automatically generates clear, concise, and context-aware commit messages using large language models.
 - **Deep Contextual Analysis:** Understands your code by analyzing staged changes, repository structure, and recent commit history to provide highly relevant suggestions.
-- **Multi-Provider & Model Support:** Flexibly works with various leading AI providers (like Anthropic, Cerebras, Groq, OpenRouter, OpenAI) and models, easily configured through an interactive setup or environment variables.
+- **Multi-Provider & Model Support:** Flexibly works with various leading AI providers (like Anthropic, Cerebras, Groq, OpenRouter, OpenAI, Z.AI) and models, easily configured through an interactive setup or environment variables.
 - **Seamless Git Workflow:** Integrates smoothly into your existing Git routine as a simple drop-in replacement for `git commit`.
 - **Extensive Customization:** Tailor commit messages to your needs with a rich set of flags, including one-liners (`-o`), AI hints (`-h`), scope inference (`-s`), and specific model selection (`-m`).
 - **Streamlined Workflow Commands:** Boost your productivity with convenient options to stage all changes (`-a`), auto-confirm commits (`-y`), and push to your remote repository (`-p`) in a single step.
{gac-1.2.4 → gac-1.2.6}/src/gac/__version__.py
@@ -1,3 +1,3 @@
 """Version information for gac package."""
 
-__version__ = "1.2.4"
+__version__ = "1.2.6"
{gac-1.2.4 → gac-1.2.6}/src/gac/ai.py
@@ -16,6 +16,7 @@ from gac.providers import (
     call_ollama_api,
     call_openai_api,
     call_openrouter_api,
+    call_zai_api,
 )
 
 logger = logging.getLogger(__name__)
@@ -67,6 +68,7 @@ def generate_commit_message(
         "cerebras": call_cerebras_api,
         "ollama": call_ollama_api,
         "openrouter": call_openrouter_api,
+        "zai": call_zai_api,
     }
 
     # Generate the commit message using centralized retry logic
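
For orientation, the two ai.py hunks above register the new Z.AI backend in gac's provider dispatch table. A minimal sketch of how such a name-to-function mapping resolves a provider string (the `route` helper and its literal arguments are illustrative and not part of gac's source; only the imported functions and the "zai" key appear in the diff):

from gac.providers import call_openrouter_api, call_zai_api

# Hypothetical dispatch table mirroring the mapping extended above.
PROVIDER_FUNCS = {
    "openrouter": call_openrouter_api,
    "zai": call_zai_api,  # new in 1.2.6
}

def route(provider: str, model: str, messages: list[dict]) -> str:
    """Illustrative only: look up the provider's API function and call it."""
    try:
        func = PROVIDER_FUNCS[provider]
    except KeyError as exc:
        raise ValueError(f"Unsupported provider: {provider}") from exc
    # All gac provider functions share the (model, messages, temperature, max_tokens) signature.
    return func(model, messages, 0.7, 256)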
{gac-1.2.4 → gac-1.2.6}/src/gac/init_cli.py
@@ -24,15 +24,16 @@ def init() -> None:
         ("Cerebras", "qwen-3-coder-480b"),
         ("Groq", "meta-llama/llama-4-maverick-17b-128e-instruct"),
         ("Ollama", "gemma3"),
-        ("OpenRouter", "openrouter/auto"),
         ("OpenAI", "gpt-4.1-mini"),
+        ("OpenRouter", "openrouter/auto"),
+        ("Z.AI", "glm-4.6"),
     ]
     provider_names = [p[0] for p in providers]
     provider = questionary.select("Select your provider:", choices=provider_names).ask()
     if not provider:
         click.echo("Provider selection cancelled. Exiting.")
         return
-    provider_key = provider.lower()
+    provider_key = provider.lower().replace(".", "")
     model_suggestion = dict(providers)[provider]
     model = questionary.text(f"Enter the model (default: {model_suggestion}):", default=model_suggestion).ask()
     model_to_save = model.strip() if model.strip() else model_suggestion
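
Note on the `provider_key` change above: with the new Z.AI entry, a plain `provider.lower()` would yield "z.ai", which would not match the "zai" key registered in ai.py. Stripping the dot keeps the two aligned; a quick illustration (the loop itself is not gac code):

# Illustration of the normalization added above (not part of gac).
for display_name in ["OpenAI", "OpenRouter", "Z.AI"]:
    print(display_name, "->", display_name.lower().replace(".", ""))
# OpenAI -> openai
# OpenRouter -> openrouter
# Z.AI -> zai  (matches the "zai" dispatch key added in ai.py)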
{gac-1.2.4 → gac-1.2.6}/src/gac/providers/__init__.py
@@ -6,6 +6,7 @@ from .groq import call_groq_api
 from .ollama import call_ollama_api
 from .openai import call_openai_api
 from .openrouter import call_openrouter_api
+from .zai import call_zai_api
 
 __all__ = [
     "call_anthropic_api",
@@ -14,4 +15,5 @@ __all__ = [
     "call_ollama_api",
     "call_openai_api",
     "call_openrouter_api",
+    "call_zai_api",
 ]
{gac-1.2.4 → gac-1.2.6}/src/gac/providers/groq.py
@@ -36,8 +36,9 @@ def call_groq_api(model: str, messages: list[dict], temperature: float, max_toke
                 content = choice["message"]["content"]
                 logger.debug(f"Found content in message.content: {repr(content)}")
                 if content is None:
-                    logger.warning("Groq API returned None content in message.content")
-                    return ""
+                    raise AIError.model_error("Groq API returned null content")
+                if content == "":
+                    raise AIError.model_error("Groq API returned empty content")
                 return content
             elif "text" in choice:
                 content = choice["text"]
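
The groq.py change above turns a null or empty completion from a silent empty string into a hard failure. A minimal sketch of how a caller could observe the new behavior (the example messages and limits are illustrative; `call_groq_api` and `AIError` are the real gac names, and a Groq API key, presumably GROQ_API_KEY by analogy with ZAI_API_KEY, is assumed to be configured):

from gac.errors import AIError
from gac.providers import call_groq_api

messages = [{"role": "user", "content": "Write a one-line commit message for a README typo fix."}]
try:
    message = call_groq_api("meta-llama/llama-4-maverick-17b-128e-instruct", messages, 0.7, 256)
except AIError as exc:
    # In 1.2.4 a null completion came back as "" and looked like success;
    # in 1.2.6 it raises, so the centralized retry logic can react instead of committing nothing.
    print(f"Groq returned no usable content: {exc}")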
gac-1.2.6/src/gac/providers/zai.py (new file)
@@ -0,0 +1,43 @@
+"""Z.AI API provider for gac."""
+
+import os
+
+import httpx
+
+from gac.errors import AIError
+
+
+def call_zai_api(model: str, messages: list[dict], temperature: float, max_tokens: int) -> str:
+    """Call Z.AI API directly."""
+    api_key = os.getenv("ZAI_API_KEY")
+    if not api_key:
+        raise AIError.model_error("ZAI_API_KEY not found in environment variables")
+
+    url = "https://api.z.ai/api/paas/v4/chat/completions"
+    headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+
+    data = {"model": model, "messages": messages, "temperature": temperature, "max_tokens": max_tokens}
+
+    try:
+        response = httpx.post(url, headers=headers, json=data, timeout=120)
+        response.raise_for_status()
+        response_data = response.json()
+
+        # Handle different possible response structures
+        if "choices" in response_data and len(response_data["choices"]) > 0:
+            choice = response_data["choices"][0]
+            if "message" in choice and "content" in choice["message"]:
+                content = choice["message"]["content"]
+                if content is None:
+                    raise AIError.model_error("Z.AI API returned null content")
+                if content == "":
+                    raise AIError.model_error("Z.AI API returned empty content")
+                return content
+            else:
+                raise AIError.model_error(f"Z.AI API response missing content: {response_data}")
+        else:
+            raise AIError.model_error(f"Z.AI API unexpected response structure: {response_data}")
+    except httpx.HTTPStatusError as e:
+        raise AIError.model_error(f"Z.AI API error: {e.response.status_code} - {e.response.text}") from e
+    except Exception as e:
+        raise AIError.model_error(f"Error calling Z.AI API: {str(e)}") from e