gac 1.12.0-py3-none-any.whl → 1.13.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
gac/__version__.py CHANGED
@@ -1,3 +1,3 @@
  """Version information for gac package."""

- __version__ = "1.12.0"
+ __version__ = "1.13.0"
gac/ai.py CHANGED
@@ -13,6 +13,8 @@ from gac.providers import (
  call_anthropic_api,
  call_cerebras_api,
  call_chutes_api,
+ call_custom_anthropic_api,
+ call_custom_openai_api,
  call_deepseek_api,
  call_fireworks_api,
  call_gemini_api,
@@ -84,6 +86,8 @@ def generate_commit_message(
  "anthropic": call_anthropic_api,
  "cerebras": call_cerebras_api,
  "chutes": call_chutes_api,
+ "custom-anthropic": call_custom_anthropic_api,
+ "custom-openai": call_custom_openai_api,
  "deepseek": call_deepseek_api,
  "fireworks": call_fireworks_api,
  "gemini": call_gemini_api,
gac/ai_utils.py CHANGED
@@ -110,6 +110,8 @@ def generate_with_retries(
  "together",
  "zai",
  "zai-coding",
+ "custom-anthropic",
+ "custom-openai",
  ]
  if provider not in supported_providers:
  raise AIError.model_error(f"Unsupported provider: {provider}. Supported providers: {supported_providers}")
gac/init_cli.py CHANGED
@@ -35,6 +35,8 @@ def init() -> None:
  ("Anthropic", "claude-haiku-4-5"),
  ("Cerebras", "qwen-3-coder-480b"),
  ("Chutes", "zai-org/GLM-4.6-FP8"),
+ ("Custom (Anthropic)", ""),
+ ("Custom (OpenAI)", ""),
  ("DeepSeek", "deepseek-chat"),
  ("Fireworks", "accounts/fireworks/models/gpt-oss-20b"),
  ("Gemini", "gemini-2.5-flash"),
@@ -55,12 +57,14 @@ def init() -> None:
  if not provider:
  click.echo("Provider selection cancelled. Exiting.")
  return
- provider_key = provider.lower().replace(".", "").replace(" ", "-")
+ provider_key = provider.lower().replace(".", "").replace(" ", "-").replace("(", "").replace(")", "")

  is_ollama = provider_key == "ollama"
  is_lmstudio = provider_key == "lm-studio"
  is_streamlake = provider_key == "streamlake"
  is_zai = provider_key in ("zai", "zai-coding")
+ is_custom_anthropic = provider_key == "custom-anthropic"
+ is_custom_openai = provider_key == "custom-openai"

  if is_streamlake:
  endpoint_id = _prompt_required_text("Enter the Streamlake inference endpoint ID (required):")
@@ -70,7 +74,10 @@ def init() -> None:
  model_to_save = endpoint_id
  else:
  model_suggestion = dict(providers)[provider]
- model_prompt = f"Enter the model (default: {model_suggestion}):"
+ if model_suggestion == "":
+ model_prompt = "Enter the model (required):"
+ else:
+ model_prompt = f"Enter the model (default: {model_suggestion}):"
  model = questionary.text(model_prompt, default=model_suggestion).ask()
  if model is None:
  click.echo("Model entry cancelled. Exiting.")
@@ -80,7 +87,28 @@ def init() -> None:
  set_key(str(GAC_ENV_PATH), "GAC_MODEL", f"{provider_key}:{model_to_save}")
  click.echo(f"Set GAC_MODEL={provider_key}:{model_to_save}")

- if is_ollama:
+ if is_custom_anthropic:
+ base_url = _prompt_required_text("Enter the custom Anthropic-compatible base URL (required):")
+ if base_url is None:
+ click.echo("Custom Anthropic base URL entry cancelled. Exiting.")
+ return
+ set_key(str(GAC_ENV_PATH), "CUSTOM_ANTHROPIC_BASE_URL", base_url)
+ click.echo(f"Set CUSTOM_ANTHROPIC_BASE_URL={base_url}")
+
+ api_version = questionary.text(
+ "Enter the API version (optional, press Enter for default: 2023-06-01):", default="2023-06-01"
+ ).ask()
+ if api_version and api_version != "2023-06-01":
+ set_key(str(GAC_ENV_PATH), "CUSTOM_ANTHROPIC_VERSION", api_version)
+ click.echo(f"Set CUSTOM_ANTHROPIC_VERSION={api_version}")
+ elif is_custom_openai:
+ base_url = _prompt_required_text("Enter the custom OpenAI-compatible base URL (required):")
+ if base_url is None:
+ click.echo("Custom OpenAI base URL entry cancelled. Exiting.")
+ return
+ set_key(str(GAC_ENV_PATH), "CUSTOM_OPENAI_BASE_URL", base_url)
+ click.echo(f"Set CUSTOM_OPENAI_BASE_URL={base_url}")
+ elif is_ollama:
  url_default = "http://localhost:11434"
  url = questionary.text(f"Enter the Ollama API URL (default: {url_default}):", default=url_default).ask()
  if url is None:
@@ -112,6 +140,10 @@ def init() -> None:
  api_key_name = "LMSTUDIO_API_KEY"
  elif is_zai:
  api_key_name = "ZAI_API_KEY"
+ elif is_custom_anthropic:
+ api_key_name = "CUSTOM_ANTHROPIC_API_KEY"
+ elif is_custom_openai:
+ api_key_name = "CUSTOM_OPENAI_API_KEY"
  else:
  api_key_name = f"{provider_key.upper()}_API_KEY"
  set_key(str(GAC_ENV_PATH), api_key_name, api_key)
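For the two custom providers, gac init therefore ends up persisting a model string plus a base URL and API key (and, for the Anthropic-compatible path, an optional version header). A hedged sketch of the values written for a custom OpenAI-compatible endpoint, using python-dotenv's set_key (an assumption; the diff only shows set_key being called); the path, model, URL, and key below are placeholders, not defaults gac ships:

    from dotenv import set_key

    env_path = "/home/user/.gac.env"  # hypothetical path; gac resolves its own GAC_ENV_PATH
    set_key(env_path, "GAC_MODEL", "custom-openai:my-proxy-model")                  # placeholder model
    set_key(env_path, "CUSTOM_OPENAI_BASE_URL", "https://your-proxy.example.com/v1")  # placeholder URL
    set_key(env_path, "CUSTOM_OPENAI_API_KEY", "sk-example")                        # placeholder key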
gac/providers/__init__.py CHANGED
@@ -3,6 +3,8 @@
  from .anthropic import call_anthropic_api
  from .cerebras import call_cerebras_api
  from .chutes import call_chutes_api
+ from .custom_anthropic import call_custom_anthropic_api
+ from .custom_openai import call_custom_openai_api
  from .deepseek import call_deepseek_api
  from .fireworks import call_fireworks_api
  from .gemini import call_gemini_api
@@ -21,6 +23,8 @@ __all__ = [
  "call_anthropic_api",
  "call_cerebras_api",
  "call_chutes_api",
+ "call_custom_anthropic_api",
+ "call_custom_openai_api",
  "call_deepseek_api",
  "call_fireworks_api",
  "call_gemini_api",
gac/providers/custom_anthropic.py ADDED
@@ -0,0 +1,133 @@
+ """Custom Anthropic-compatible API provider for gac.
+
+ This provider allows users to specify a custom Anthropic-compatible endpoint
+ while using the same model capabilities as the standard Anthropic provider.
+ """
+
+ import json
+ import logging
+ import os
+
+ import httpx
+
+ from gac.errors import AIError
+
+ logger = logging.getLogger(__name__)
+
+
+ def call_custom_anthropic_api(model: str, messages: list[dict], temperature: float, max_tokens: int) -> str:
+ """Call a custom Anthropic-compatible API endpoint.
+
+ This provider is useful for:
+ - Anthropic-compatible proxies or gateways
+ - Self-hosted Anthropic-compatible services
+ - Other services implementing the Anthropic Messages API
+
+ Environment variables:
+ CUSTOM_ANTHROPIC_API_KEY: API key for authentication (required)
+ CUSTOM_ANTHROPIC_BASE_URL: Base URL for the API endpoint (required)
+ Example: https://your-proxy.example.com
+ CUSTOM_ANTHROPIC_VERSION: API version header (optional, defaults to '2023-06-01')
+
+ Args:
+ model: The model to use (e.g., 'claude-3-5-sonnet-20241022', 'claude-3-5-haiku-latest')
+ messages: List of message dictionaries with 'role' and 'content' keys
+ temperature: Controls randomness (0.0-1.0)
+ max_tokens: Maximum tokens in the response
+
+ Returns:
+ The generated commit message
+
+ Raises:
+ AIError: If authentication fails, API errors occur, or response is invalid
+ """
+ api_key = os.getenv("CUSTOM_ANTHROPIC_API_KEY")
+ if not api_key:
+ raise AIError.authentication_error("CUSTOM_ANTHROPIC_API_KEY environment variable not set")
+
+ base_url = os.getenv("CUSTOM_ANTHROPIC_BASE_URL")
+ if not base_url:
+ raise AIError.model_error("CUSTOM_ANTHROPIC_BASE_URL environment variable not set")
+
+ api_version = os.getenv("CUSTOM_ANTHROPIC_VERSION", "2023-06-01")
+
+ if "/v1/messages" not in base_url:
+ base_url = base_url.rstrip("/")
+ url = f"{base_url}/v1/messages"
+ else:
+ url = base_url
+
+ headers = {"x-api-key": api_key, "anthropic-version": api_version, "content-type": "application/json"}
+
+ anthropic_messages = []
+ system_message = ""
+
+ for msg in messages:
+ if msg["role"] == "system":
+ system_message = msg["content"]
+ else:
+ anthropic_messages.append({"role": msg["role"], "content": msg["content"]})
+
+ data = {"model": model, "messages": anthropic_messages, "temperature": temperature, "max_tokens": max_tokens}
+
+ if system_message:
+ data["system"] = system_message
+
+ try:
+ response = httpx.post(url, headers=headers, json=data, timeout=120)
+ response.raise_for_status()
+ response_data = response.json()
+
+ try:
+ content_list = response_data.get("content", [])
+ if not content_list:
+ raise AIError.model_error("Custom Anthropic API returned empty content array")
+
+ # Try standard Anthropic format first: content[0].text
+ if "text" in content_list[0]:
+ content = content_list[0]["text"]
+ else:
+ # Extended format (e.g., MiniMax with thinking): find first item with type="text"
+ text_item = next((item for item in content_list if item.get("type") == "text"), None)
+ if text_item and "text" in text_item:
+ content = text_item["text"]
+ else:
+ logger.error(
+ f"Unexpected response format from Custom Anthropic API. Response: {json.dumps(response_data)}"
+ )
+ raise AIError.model_error(
+ "Custom Anthropic API returned unexpected format. Expected 'text' field in content array."
+ )
+ except AIError:
+ raise
+ except (KeyError, IndexError, TypeError, StopIteration) as e:
+ logger.error(f"Unexpected response format from Custom Anthropic API. Response: {json.dumps(response_data)}")
+ raise AIError.model_error(
+ f"Custom Anthropic API returned unexpected format. Expected Anthropic-compatible response with "
+ f"'content[0].text' or items with type='text', but got: {type(e).__name__}. "
+ f"Check logs for full response structure."
+ ) from e
+
+ if content is None:
+ raise AIError.model_error("Custom Anthropic API returned null content")
+ if content == "":
+ raise AIError.model_error("Custom Anthropic API returned empty content")
+ return content
+ except httpx.ConnectError as e:
+ raise AIError.connection_error(f"Custom Anthropic API connection failed: {str(e)}") from e
+ except httpx.HTTPStatusError as e:
+ status_code = e.response.status_code
+ error_text = e.response.text
+
+ if status_code == 401:
+ raise AIError.authentication_error(f"Custom Anthropic API authentication failed: {error_text}") from e
+ elif status_code == 429:
+ raise AIError.rate_limit_error(f"Custom Anthropic API rate limit exceeded: {error_text}") from e
+ else:
+ raise AIError.model_error(f"Custom Anthropic API error: {status_code} - {error_text}") from e
+ except httpx.TimeoutException as e:
+ raise AIError.timeout_error(f"Custom Anthropic API request timed out: {str(e)}") from e
+ except AIError:
+ raise
+ except Exception as e:
+ raise AIError.model_error(f"Error calling Custom Anthropic API: {str(e)}") from e
gac/providers/custom_openai.py ADDED
@@ -0,0 +1,99 @@
+ """Custom OpenAI-compatible API provider for gac.
+
+ This provider allows users to specify a custom OpenAI-compatible endpoint
+ while using the same model capabilities as the standard OpenAI provider.
+ """
+
+ import json
+ import logging
+ import os
+
+ import httpx
+
+ from gac.errors import AIError
+
+ logger = logging.getLogger(__name__)
+
+
+ def call_custom_openai_api(model: str, messages: list[dict], temperature: float, max_tokens: int) -> str:
+ """Call a custom OpenAI-compatible API endpoint.
+
+ This provider is useful for:
+ - Azure OpenAI Service
+ - OpenAI-compatible proxies or gateways
+ - Self-hosted OpenAI-compatible services
+ - Other services implementing the OpenAI Chat Completions API
+
+ Environment variables:
+ CUSTOM_OPENAI_API_KEY: API key for authentication (required)
+ CUSTOM_OPENAI_BASE_URL: Base URL for the API endpoint (required)
+ Example: https://your-endpoint.openai.azure.com
+ Example: https://your-proxy.example.com/v1
+
+ Args:
+ model: The model to use (e.g., 'gpt-4', 'gpt-3.5-turbo')
+ messages: List of message dictionaries with 'role' and 'content' keys
+ temperature: Controls randomness (0.0-1.0)
+ max_tokens: Maximum tokens in the response
+
+ Returns:
+ The generated commit message
+
+ Raises:
+ AIError: If authentication fails, API errors occur, or response is invalid
+ """
+ api_key = os.getenv("CUSTOM_OPENAI_API_KEY")
+ if not api_key:
+ raise AIError.authentication_error("CUSTOM_OPENAI_API_KEY environment variable not set")
+
+ base_url = os.getenv("CUSTOM_OPENAI_BASE_URL")
+ if not base_url:
+ raise AIError.model_error("CUSTOM_OPENAI_BASE_URL environment variable not set")
+
+ if "/chat/completions" not in base_url:
+ base_url = base_url.rstrip("/")
+ url = f"{base_url}/chat/completions"
+ else:
+ url = base_url
+
+ headers = {"Authorization": f"Bearer {api_key}", "Content-Type": "application/json"}
+
+ data = {"model": model, "messages": messages, "temperature": temperature, "max_completion_tokens": max_tokens}
+
+ try:
+ response = httpx.post(url, headers=headers, json=data, timeout=120)
+ response.raise_for_status()
+ response_data = response.json()
+
+ try:
+ content = response_data["choices"][0]["message"]["content"]
+ except (KeyError, IndexError, TypeError) as e:
+ logger.error(f"Unexpected response format from Custom OpenAI API. Response: {json.dumps(response_data)}")
+ raise AIError.model_error(
+ f"Custom OpenAI API returned unexpected format. Expected OpenAI-compatible response with "
+ f"'choices[0].message.content', but got: {type(e).__name__}. Check logs for full response structure."
+ ) from e
+
+ if content is None:
+ raise AIError.model_error("Custom OpenAI API returned null content")
+ if content == "":
+ raise AIError.model_error("Custom OpenAI API returned empty content")
+ return content
+ except httpx.ConnectError as e:
+ raise AIError.connection_error(f"Custom OpenAI API connection failed: {str(e)}") from e
+ except httpx.HTTPStatusError as e:
+ status_code = e.response.status_code
+ error_text = e.response.text
+
+ if status_code == 401:
+ raise AIError.authentication_error(f"Custom OpenAI API authentication failed: {error_text}") from e
+ elif status_code == 429:
+ raise AIError.rate_limit_error(f"Custom OpenAI API rate limit exceeded: {error_text}") from e
+ else:
+ raise AIError.model_error(f"Custom OpenAI API error: {status_code} - {error_text}") from e
+ except httpx.TimeoutException as e:
+ raise AIError.timeout_error(f"Custom OpenAI API request timed out: {str(e)}") from e
+ except AIError:
+ raise
+ except Exception as e:
+ raise AIError.model_error(f"Error calling Custom OpenAI API: {str(e)}") from e
gac/providers/gemini.py CHANGED
@@ -16,32 +16,37 @@ def call_gemini_api(model: str, messages: list[dict[str, Any]], temperature: flo

  url = f"https://generativelanguage.googleapis.com/v1beta/models/{model}:generateContent"

- # Build contents array following 2025 Gemini API format
- contents = []
+ # Build Gemini request payload, converting roles to supported values.
+ contents: list[dict[str, Any]] = []
+ system_instruction_parts: list[dict[str, str]] = []

- # Add system instruction as first content with role "system" (2025 format)
  for msg in messages:
- if msg["role"] == "system":
- contents.append({"role": "system", "parts": [{"text": msg["content"]}]})
- break
+ role = msg.get("role")
+ content_value = msg.get("content")
+ content = "" if content_value is None else str(content_value)

- # Add user and assistant messages
- for msg in messages:
- if msg["role"] == "user":
- contents.append({"role": "user", "parts": [{"text": msg["content"]}]})
- elif msg["role"] == "assistant":
- contents.append(
- {
- "role": "model", # Gemini uses "model" instead of "assistant"
- "parts": [{"text": msg["content"]}],
- }
- )
+ if role == "system":
+ if content.strip():
+ system_instruction_parts.append({"text": content})
+ continue
+
+ if role == "assistant":
+ gemini_role = "model"
+ elif role == "user":
+ gemini_role = "user"
+ else:
+ raise AIError.model_error(f"Unsupported message role for Gemini API: {role}")
+
+ contents.append({"role": gemini_role, "parts": [{"text": content}]})

  payload: dict[str, Any] = {
  "contents": contents,
  "generationConfig": {"temperature": temperature, "maxOutputTokens": max_tokens},
  }

+ if system_instruction_parts:
+ payload["systemInstruction"] = {"role": "system", "parts": system_instruction_parts}
+
  headers = {"x-goog-api-key": api_key, "Content-Type": "application/json"}

  try:
@@ -50,18 +55,26 @@ def call_gemini_api(model: str, messages: list[dict[str, Any]], temperature: flo
  response_data = response.json()

  # Check for candidates and proper response structure
- if not response_data.get("candidates"):
+ candidates = response_data.get("candidates")
+ if not candidates:
  raise AIError.model_error("Gemini API response missing candidates")

- candidate = response_data["candidates"][0]
+ candidate = candidates[0]
  if "content" not in candidate or "parts" not in candidate["content"] or not candidate["content"]["parts"]:
  raise AIError.model_error("Gemini API response has invalid content structure")

- content = candidate["content"]["parts"][0].get("text")
- if content is None or content == "":
+ parts = candidate["content"]["parts"]
+ content_text: str | None = None
+ for part in parts:
+ if isinstance(part, dict):
+ part_text = part.get("text")
+ if isinstance(part_text, str) and part_text:
+ content_text = part_text
+ break
+ if content_text is None:
  raise AIError.model_error("Gemini API response missing text content")

- return content
+ return content_text
  except AIError:
  raise
  except httpx.HTTPStatusError as e:
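The net effect of the gemini.py change is easier to see with a concrete conversation: system content now travels in a top-level systemInstruction instead of the contents array, assistant turns are relabelled as "model", unknown roles fail fast, and the first non-empty text part of the first candidate is returned. A hedged sketch of the payload the rewritten code would build (sample text and generation settings are illustrative only):

    # Sample input conversation (made-up content).
    messages = [
        {"role": "system", "content": "Write a conventional commit message."},
        {"role": "user", "content": "diff --git a/app.py b/app.py ..."},
        {"role": "assistant", "content": "feat: add app entry point"},
    ]

    # Payload shape produced by the new logic for temperature=0.2, max_tokens=256.
    expected_payload = {
        "contents": [
            {"role": "user", "parts": [{"text": "diff --git a/app.py b/app.py ..."}]},
            {"role": "model", "parts": [{"text": "feat: add app entry point"}]},  # assistant -> model
        ],
        "generationConfig": {"temperature": 0.2, "maxOutputTokens": 256},
        "systemInstruction": {"role": "system", "parts": [{"text": "Write a conventional commit message."}]},
    }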
gac-1.12.0.dist-info/METADATA → gac-1.13.0.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: gac
- Version: 1.12.0
+ Version: 1.13.0
  Summary: LLM-powered Git commit message generator with multi-provider support
  Project-URL: Homepage, https://github.com/cellwebb/gac
  Project-URL: Documentation, https://github.com/cellwebb/gac#readme
@@ -91,9 +91,10 @@ gac

  ### 🌐 **Supported Providers**

- - **Anthropic** • **Cerebras** • **Chutes.ai** • **DeepSeek** • **Fireworks** • **Gemini**
- - **Groq** • **LM Studio** • **MiniMax** • **Ollama** • **OpenAI** • **OpenRouter**
- - **Streamlake** • **Synthetic.new** • **Together AI** • **Z.AI** • **Z.AI Coding**
+ - **Anthropic** • **Cerebras** • **Chutes.ai** • **DeepSeek** • **Fireworks**
+ - **Gemini** • **Groq** • **LM Studio** • **MiniMax** • **Ollama** • **OpenAI**
+ - **OpenRouter** • **Streamlake** • **Synthetic.new** • **Together AI**
+ - **Z.AI** • **Z.AI Coding** • **Custom Endpoint (Anthropic/OpenAI)**

  ### 🧠 **Smart LLM Analysis**

gac-1.12.0.dist-info/RECORD → gac-1.13.0.dist-info/RECORD RENAMED
@@ -1,7 +1,7 @@
  gac/__init__.py,sha256=z9yGInqtycFIT3g1ca24r-A3699hKVaRqGUI79wsmMc,415
- gac/__version__.py,sha256=pz_YrWiIGihOSHm_4WiV9sLizQLrXIAAXp4teEYrS0A,67
- gac/ai.py,sha256=3Po9J0b1bjqtMs3p62yUMOMCU3ZT3iyh-QsagmevSWY,4081
- gac/ai_utils.py,sha256=tZNLGJEyKhvBQ8zI0xZ56gfI6optHdkoHUa2ZzEtdBQ,7373
+ gac/__version__.py,sha256=oFvFxf8Y4B3yJ6IccvXPcWK4lxUIK8Dr3Vc_FOg4a8E,67
+ gac/ai.py,sha256=fg642la4yMecOwfZHQ7Ixl6z-5_qj9Q1SxwVMnPDCcY,4244
+ gac/ai_utils.py,sha256=EDkw0nnwnV5Ba2CLEo2HC15-L5BZtGJATin5Az0ZHkg,7426
  gac/cli.py,sha256=crUUI6osYtE3QAZ7r6DRlVk9gR3X2PctzS1sssVQ9_g,5070
  gac/config.py,sha256=n3TkQYBqSKkH68QUM6M7kwSK83ghmItoh0p5ZDFnhHA,1746
  gac/config_cli.py,sha256=v9nFHZO1RvK9fzHyuUS6SG-BCLHMsdOMDwWamBhVVh4,1608
@@ -9,19 +9,21 @@ gac/constants.py,sha256=8GHB7yeK2CYT0t80-k9N6LvgZPe-StNH3dK3NsUO46c,4977
  gac/diff_cli.py,sha256=wnVQ9OFGnM0d2Pj9WVjWbo0jxqIuRHVAwmb8wU9Pa3E,5676
  gac/errors.py,sha256=ysDIVRCd0YQVTOW3Q6YzdolxCdtkoQCAFf3_jrqbjUY,7916
  gac/git.py,sha256=g6tvph50zV-wrTWrxARYXEpl0NeI8-ffFwHoqhp3fSE,8033
- gac/init_cli.py,sha256=mpjO6eVO-n3uT8NhJ7QWVQuVu2jFZ5aG4XksG2mbX78,4925
+ gac/init_cli.py,sha256=JsHMZBFt_2aFMATlbL_ugSZGQGJf8VRosFjNIRGNM8U,6573
  gac/main.py,sha256=dJrBSN5rJlbWspLGDx3eUJU4uZFVhvuv7qtgIvF7RH4,14723
  gac/preprocess.py,sha256=aMxsjGxy9YP752NWjgf0KP5Sn6p8keIJAGlMYr8jDgQ,15373
  gac/prompt.py,sha256=d_kBXmhf3bDVLyDj8J7AS7GBAxF2jlc8lXoHX3Dzi5k,24255
  gac/security.py,sha256=15Yp6YR8QC4eECJi1BUCkMteh_veZXUbLL6W8qGcDm4,9920
  gac/utils.py,sha256=nV42-brIHW_fBg7x855GM8nRrqEBbRzTSweg-GTyGE8,3971
- gac/providers/__init__.py,sha256=ZQES81QO2VnjCZkVRhyN1DDGQXrRD2DW0jswI1wU86Y,1136
+ gac/providers/__init__.py,sha256=3WTzh3ngAdvR40eezpMMFD7Zibb-LxexDYUcSm4axQI,1305
  gac/providers/anthropic.py,sha256=VK5d1s1PmBNDwh_x7illQ2CIZIHNIYU28btVfizwQPs,2036
  gac/providers/cerebras.py,sha256=Ik8lhlsliGJVkgDgqlThfpra9tqbdYQZkaC4eNxRd9w,1648
  gac/providers/chutes.py,sha256=cclJOLuGVIiiaF-9Bs1kH6SSOhEmduGB2zZ86KIaXKw,2617
+ gac/providers/custom_anthropic.py,sha256=l5-bRusVA19LFQ5QCZBU0Wa1rWbxoBHkWCwszQMPzvw,5545
+ gac/providers/custom_openai.py,sha256=EB1H21oV6CJNPGtM7u2EP7sGaPhCQ0lQAcIcj-2PXvE,3975
  gac/providers/deepseek.py,sha256=leT2S4_CE6JzwF3skDd4umBsu2rkJOJ66AfOdSL5wGc,1643
  gac/providers/fireworks.py,sha256=zsWhf6LMVdtsD9keXRFwgn9lCQigz6VmrDl6vqIVkdI,1688
- gac/providers/gemini.py,sha256=GZQz6Y9fd5-xk-U4pXn9bXLeBowxDXOYDyWyrtjFurM,2909
+ gac/providers/gemini.py,sha256=kl9WKdPm_ANYk0hsrUyMdACzR0cm8Eui9M1IwObYW-4,3348
  gac/providers/groq.py,sha256=9v2fAjDa_iRNHFptiUBN8Vt7ZDKkW_JOmIBeYvycD1M,2806
  gac/providers/lmstudio.py,sha256=R82-f0tWdFfGQxLT6o3Q2tfvYguF7ESUg9DEUHNyrDk,2146
  gac/providers/minimax.py,sha256=oI5rEVlkcYenNUNH53zS00X8NqpcZ1gMsTGzQCsmes4,1630
@@ -32,8 +34,8 @@ gac/providers/streamlake.py,sha256=KAA2ZnpuEI5imzvdWVWUhEBHSP0BMnprKXte6CbwBWY,2
  gac/providers/synthetic.py,sha256=sRMIJTS9LpcXd9A7qp_ZjZxdqtTKRn9fl1W4YwJZP4c,1855
  gac/providers/together.py,sha256=1bUIVHfYzcEDw4hQPE8qV6hjc2JNHPv_khVgpk2IJxI,1667
  gac/providers/zai.py,sha256=kywhhrCfPBu0rElZyb-iENxQxxpVGykvePuL4xrXlaU,2739
- gac-1.12.0.dist-info/METADATA,sha256=tssJw0ytHWHFn5YgNjNdKWLcxEaWffc6BR4WHGXctDc,7837
- gac-1.12.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- gac-1.12.0.dist-info/entry_points.txt,sha256=tdjN-XMmcWfL92swuRAjT62bFLOAwk9bTMRLGP5Z4aI,36
- gac-1.12.0.dist-info/licenses/LICENSE,sha256=vOab37NouL1PNs5BswnPayrMCqaN2sqLfMQfqPDrpZg,1103
- gac-1.12.0.dist-info/RECORD,,
+ gac-1.13.0.dist-info/METADATA,sha256=pu638LGgkEXsDWzoHnchIkOOne5MjzQ25ILkDtXVOfs,7878
+ gac-1.13.0.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ gac-1.13.0.dist-info/entry_points.txt,sha256=tdjN-XMmcWfL92swuRAjT62bFLOAwk9bTMRLGP5Z4aI,36
+ gac-1.13.0.dist-info/licenses/LICENSE,sha256=vOab37NouL1PNs5BswnPayrMCqaN2sqLfMQfqPDrpZg,1103
+ gac-1.13.0.dist-info/RECORD,,
File without changes