janito 2.2.0__py3-none-any.whl → 2.3.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- janito/__init__.py +1 -1
- janito/agent/setup_agent.py +14 -5
- janito/agent/templates/profiles/system_prompt_template_main.txt.j2 +3 -1
- janito/cli/chat_mode/bindings.py +6 -0
- janito/cli/chat_mode/session.py +16 -0
- janito/cli/chat_mode/shell/commands/__init__.py +3 -0
- janito/cli/chat_mode/shell/commands/exec.py +27 -0
- janito/cli/chat_mode/shell/commands/tools.py +17 -6
- janito/cli/chat_mode/shell/session/manager.py +1 -0
- janito/cli/chat_mode/toolbar.py +1 -0
- janito/cli/cli_commands/model_utils.py +95 -84
- janito/cli/config.py +2 -1
- janito/cli/core/getters.py +33 -31
- janito/cli/core/runner.py +165 -148
- janito/cli/core/setters.py +5 -1
- janito/cli/main_cli.py +12 -1
- janito/cli/prompt_core.py +5 -2
- janito/cli/rich_terminal_reporter.py +22 -3
- janito/cli/single_shot_mode/handler.py +11 -1
- janito/cli/verbose_output.py +1 -1
- janito/config_manager.py +112 -110
- janito/driver_events.py +14 -0
- janito/drivers/azure_openai/driver.py +38 -3
- janito/drivers/driver_registry.py +0 -2
- janito/drivers/openai/driver.py +196 -36
- janito/llm/auth.py +63 -62
- janito/llm/driver.py +7 -1
- janito/llm/driver_config.py +1 -0
- janito/provider_config.py +7 -3
- janito/provider_registry.py +18 -0
- janito/providers/__init__.py +1 -0
- janito/providers/anthropic/provider.py +4 -2
- janito/providers/azure_openai/model_info.py +16 -15
- janito/providers/azure_openai/provider.py +33 -2
- janito/providers/deepseek/provider.py +3 -0
- janito/providers/google/model_info.py +21 -29
- janito/providers/google/provider.py +52 -38
- janito/providers/mistralai/provider.py +5 -2
- janito/providers/openai/provider.py +4 -0
- janito/providers/provider_static_info.py +2 -3
- janito/tools/adapters/local/adapter.py +33 -11
- janito/tools/adapters/local/delete_text_in_file.py +4 -7
- janito/tools/adapters/local/move_file.py +3 -13
- janito/tools/adapters/local/remove_directory.py +6 -17
- janito/tools/adapters/local/remove_file.py +4 -10
- janito/tools/adapters/local/replace_text_in_file.py +6 -9
- janito/tools/adapters/local/search_text/match_lines.py +1 -1
- janito/tools/tools_adapter.py +78 -6
- janito/version.py +1 -1
- {janito-2.2.0.dist-info → janito-2.3.1.dist-info}/METADATA +149 -10
- {janito-2.2.0.dist-info → janito-2.3.1.dist-info}/RECORD +55 -56
- janito/drivers/google_genai/driver.py +0 -54
- janito/drivers/google_genai/schema_generator.py +0 -67
- {janito-2.2.0.dist-info → janito-2.3.1.dist-info}/WHEEL +0 -0
- {janito-2.2.0.dist-info → janito-2.3.1.dist-info}/entry_points.txt +0 -0
- {janito-2.2.0.dist-info → janito-2.3.1.dist-info}/licenses/LICENSE +0 -0
- {janito-2.2.0.dist-info → janito-2.3.1.dist-info}/top_level.txt +0 -0
janito/llm/auth.py
CHANGED
@@ -1,62 +1,63 @@
 """
 LLMAuthManager: Handles authentication credentials for LLM providers, persisted in ~/.janito/auth.json or a custom path.
 """

 import os
 import json
 from typing import Dict, Optional


 class LLMAuthManager:
     """
     Manages authentication tokens, API keys, or credentials for LLM providers.
     Persists credentials in ~/.janito/auth.json or a custom path.
     """

     def __init__(self, auth_file: Optional[str] = None):
         if auth_file is not None:
             self._auth_file = os.path.expanduser(auth_file)
         else:
             self._auth_file = os.path.expanduser("~/.janito/auth.json")
         self._credentials: Dict[str, str] = {}
         self._load_credentials()

     def _load_credentials(self):
         if os.path.exists(self._auth_file):
             try:
                 with open(self._auth_file, "r") as f:
                     self._credentials = json.load(f)
             except Exception:
                 self._credentials = {}
         else:
             self._credentials = {}

     def _save_credentials(self):
         os.makedirs(os.path.dirname(self._auth_file), exist_ok=True)
         with open(self._auth_file, "w") as f:
             json.dump(self._credentials, f, indent=2)
+            f.write("\n")
-    (previous set_credentials/get_credentials/remove_credentials bodies; truncated in the source rendering)
+
+    def set_credentials(self, provider_name: str, credentials: str) -> None:
+        """
+        Store credentials for a given provider and persist to disk. Raises ValueError if provider is unknown.
+        """
+        from janito.providers.registry import LLMProviderRegistry
+
+        if provider_name not in LLMProviderRegistry.list_providers():
+            raise ValueError(f"Unknown provider: {provider_name}")
+        self._credentials[provider_name] = credentials
+        self._save_credentials()
+
+    def get_credentials(self, provider_name: str) -> Optional[str]:
+        """
+        Retrieve credentials for a given provider.
+        """
+        return self._credentials.get(provider_name)
+
+    def remove_credentials(self, provider_name: str) -> None:
+        """
+        Remove credentials for a given provider and update disk.
+        """
+        if provider_name in self._credentials:
+            del self._credentials[provider_name]
+            self._save_credentials()
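As a quick illustration of the new behaviour, a minimal usage sketch; the provider name "openai" and the placeholder key are assumptions for illustration, not values taken from this diff:

from janito.llm.auth import LLMAuthManager

auth = LLMAuthManager()                    # persists to ~/.janito/auth.json by default
auth.set_credentials("openai", "sk-test")  # 2.3.1 validates the provider name before saving
try:
    auth.set_credentials("no-such-provider", "key")
except ValueError as exc:
    print(exc)                             # -> Unknown provider: no-such-provider
print(auth.get_credentials("openai"))      # -> sk-test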
janito/llm/driver.py
CHANGED
@@ -122,11 +122,17 @@ class LLMDriver(ABC):
         if not self.available:
             self.handle_driver_unavailable(request_id)
             return
+        # Prepare payload for RequestStarted event
+        payload = {"provider_name": self.provider_name}
+        if hasattr(config, "model") and getattr(config, "model", None):
+            payload["model"] = getattr(config, "model")
+        elif hasattr(config, "model_name") and getattr(config, "model_name", None):
+            payload["model"] = getattr(config, "model_name")
         self.output_queue.put(
             RequestStarted(
                 driver_name=self.__class__.__name__,
                 request_id=request_id,
-                payload=
+                payload=payload,
             )
         )
         # Check for cancel_event before starting
janito/llm/driver_config.py
CHANGED
@@ -23,6 +23,7 @@ class LLMDriverConfig:
     presence_penalty: Optional[float] = None
     frequency_penalty: Optional[float] = None
     stop: Optional[Any] = None  # list or string, depending on backend
+    reasoning_effort: Optional[str] = None
     extra: dict = field(
         default_factory=dict
     )  # for provider-specific miscellaneous config fields
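A minimal construction sketch for the new field; the model name and effort value shown here are assumptions for illustration:

from janito.llm.driver_config import LLMDriverConfig

config = LLMDriverConfig(
    model="gpt-4o",             # assumed model name, not taken from this diff
    reasoning_effort="medium",  # new in 2.3.1; forwarded to drivers that support it
)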
janito/provider_config.py
CHANGED
@@ -36,6 +36,7 @@ def set_provider_config(provider, key, value):
     config.file_config["providers"] = cfg
     with open(config.config_path, "w", encoding="utf-8") as f:
         json.dump(config.file_config, f, indent=2)
+        f.write("\n")


 def set_provider_model_config(provider, model, key, value):
@@ -51,6 +52,7 @@ def set_provider_model_config(provider, model, key, value):
     config.file_config["providers"] = cfg
     with open(config.config_path, "w", encoding="utf-8") as f:
         json.dump(config.file_config, f, indent=2)
+        f.write("\n")


 def get_provider_model_config(provider, model):
@@ -71,9 +73,11 @@ def get_effective_model(provider=None, requested_model=None):
     provider_model = config.get_provider_config(provider).get("model")
     if provider_model:
         return provider_model
-
-    if
-
+    # Only use global model if no provider is specified
+    if provider is None:
+        global_model = config.get("model")
+        if global_model:
+            return global_model
     return None
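A sketch of the resolution order implied by the hunk above; the function and key names come from the diff, while the earlier requested_model handling is not shown in this hunk and is presumed to take precedence:

from janito.provider_config import get_effective_model

# 1. presumably an explicitly requested model wins (outside this hunk),
# 2. otherwise the per-provider "model" setting,
# 3. otherwise the global "model" key, but only when no provider was named.
print(get_effective_model(provider="openai"))  # per-provider model or None
print(get_effective_model())                   # may fall back to the global "model" key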
janito/provider_registry.py
CHANGED
@@ -48,6 +48,24 @@ class ProviderRegistry:
         table.add_section()

     def _print_table(self, table):
+        """Print the table using rich when running in a terminal; otherwise fall back to a plain ASCII listing.
+        This avoids UnicodeDecodeError when the parent process captures the output with a non-UTF8 encoding.
+        """
+        import sys
+
+        if sys.stdout.isatty():
+            # Safe to use rich's unicode output when attached to an interactive terminal.
+            shared_console.print(table)
+            return
+
+        # Fallback: plain ASCII output
+        print("Supported LLM Providers")
+        print("Provider | Maintainer | Model Names")
+        for row in table.rows:
+            # row is a rich.table.Row -> row.cells is a list of Text objects
+            cells_text = [str(cell) for cell in row.cells]
+            ascii_row = " | ".join(cells_text).encode("ascii", "ignore").decode("ascii")
+            print(ascii_row)
         shared_console.print(table)

     def _get_provider_info(self, provider_name):
janito/providers/__init__.py
CHANGED
@@ -1,5 +1,6 @@
 # Ensure all providers are registered by importing their modules
 import janito.providers.openai.provider
+import janito.providers.google.provider
 import janito.providers.mistralai.provider
 import janito.providers.google.provider
 import janito.providers.azure_openai.provider
janito/providers/anthropic/provider.py
CHANGED
@@ -2,7 +2,7 @@ from janito.llm.provider import LLMProvider
 from janito.llm.model import LLMModelInfo
 from janito.llm.auth import LLMAuthManager
 from janito.llm.driver_config import LLMDriverConfig
-from janito.tools
+from janito.tools import get_local_tools_adapter
 from janito.providers.registry import LLMProviderRegistry

 from .model_info import MODEL_SPECS
@@ -23,12 +23,14 @@ class AnthropicProvider(LLMProvider):
     def __init__(
         self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
     ):
+        # Ensure we always have a tools adapter, even if the driver itself is unavailable.
+        self._tools_adapter = get_local_tools_adapter()
         if not self.available:
             self._driver = None
             return
         self.auth_manager = auth_manager or LLMAuthManager()
         self._api_key = self.auth_manager.get_credentials(type(self).name)
-        self._tools_adapter =
+        self._tools_adapter = get_local_tools_adapter()
         self._info = config or LLMDriverConfig(model=None)
         if not self._info.model:
             self._info.model = self.DEFAULT_MODEL
janito/providers/azure_openai/model_info.py
CHANGED
@@ -1,15 +1,16 @@
-from janito.llm.model import LLMModelInfo
-    (previous MODEL_SPECS entries removed; truncated in the source rendering)
+from janito.llm.model import LLMModelInfo
+from janito.providers.openai.model_info import MODEL_SPECS as OPENAI_MODEL_SPECS
+
+MODEL_SPECS = {
+    "azure_openai_deployment": LLMModelInfo(
+        name="azure_openai_deployment",
+        context=OPENAI_MODEL_SPECS["gpt-4o"].context,
+        max_input=OPENAI_MODEL_SPECS["gpt-4o"].max_input,
+        max_cot=OPENAI_MODEL_SPECS["gpt-4o"].max_cot,
+        max_response=OPENAI_MODEL_SPECS["gpt-4o"].max_response,
+        thinking_supported=OPENAI_MODEL_SPECS["gpt-4o"].thinking_supported,
+        default_temp=OPENAI_MODEL_SPECS["gpt-4o"].default_temp,
+        open="azure_openai",
+        driver="AzureOpenAIModelDriver",
+    )
+}
janito/providers/azure_openai/provider.py
CHANGED
@@ -2,7 +2,7 @@ from janito.llm.provider import LLMProvider
 from janito.llm.model import LLMModelInfo
 from janito.llm.auth import LLMAuthManager
 from janito.llm.driver_config import LLMDriverConfig
-from janito.tools
+from janito.tools import get_local_tools_adapter
 from janito.providers.registry import LLMProviderRegistry

 from .model_info import MODEL_SPECS
@@ -23,12 +23,15 @@ class AzureOpenAIProvider(LLMProvider):
     def __init__(
         self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
     ):
+        # Always create a tools adapter so that provider.execute_tool() works even when
+        # the underlying driver is not available (e.g. OpenAI SDK not installed).
+        self._tools_adapter = get_local_tools_adapter()
         if not self.available:
             self._driver = None
             return
         self._auth_manager = auth_manager or LLMAuthManager()
         self._api_key = self._auth_manager.get_credentials(type(self).name)
-        self._tools_adapter =
+        self._tools_adapter = get_local_tools_adapter()
         self._driver_config = config or LLMDriverConfig(model=None)
         if not self._driver_config.model:
             self._driver_config.model = self.DEFAULT_MODEL
@@ -36,6 +39,11 @@ class AzureOpenAIProvider(LLMProvider):
         self._driver_config.api_key = self._api_key
         if not self._driver_config.extra.get("api_version"):
             self._driver_config.extra["api_version"] = "2023-05-15"
+        # Inject azure_deployment_name from config if present
+        from janito.config import config as global_config
+        deployment_name = global_config.get("azure_deployment_name")
+        if deployment_name:
+            self._driver_config.extra["azure_deployment_name"] = deployment_name
         self.fill_missing_device_info(self._driver_config)
         self._driver = AzureOpenAIModelDriver(tools_adapter=self._tools_adapter)

@@ -61,6 +69,29 @@ class AzureOpenAIProvider(LLMProvider):
         """
         return True

+    def get_model_info(self, model_name=None):
+        """
+        For Azure OpenAI, accept any deployment name as a valid model name.
+        If the model_name is not in MODEL_SPECS, return a generic info dict.
+        """
+        if model_name is None:
+            # Return all known specs, but note: only static ones are listed
+            return {name: model_info.to_dict() for name, model_info in self.MODEL_SPECS.items()}
+        if model_name in self.MODEL_SPECS:
+            return self.MODEL_SPECS[model_name].to_dict()
+        # Accept any deployment name as a valid model
+        return {
+            "name": model_name,
+            "context": "N/A",
+            "max_input": "N/A",
+            "max_cot": "N/A",
+            "max_response": "N/A",
+            "thinking_supported": False,
+            "default_temp": 0.2,
+            "open": "azure_openai",
+            "driver": "AzureOpenAIModelDriver",
+        }
+
     def create_driver(self):
         """
         Creates and returns a new AzureOpenAIModelDriver instance with the provider's configuration and tools adapter.
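A short sketch of how the new get_model_info() behaves for arbitrary deployment names; the deployment name used here is an assumption, and it presumes AzureOpenAIProvider can be constructed in your environment:

from janito.providers.azure_openai.provider import AzureOpenAIProvider

provider = AzureOpenAIProvider()
info = provider.get_model_info("my-custom-deployment")  # not present in MODEL_SPECS
print(info["driver"])               # -> AzureOpenAIModelDriver
print(info["thinking_supported"])   # -> False (generic fallback entry)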
janito/providers/deepseek/provider.py
CHANGED
@@ -21,6 +21,9 @@ class DeepseekProvider(LLMProvider):
     def __init__(
         self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
     ):
+        # Always set a tools adapter so that even if the driver is unavailable,
+        # generic code paths that expect provider.execute_tool() continue to work.
+        self._tools_adapter = get_local_tools_adapter()
         if not self.available:
             self._driver = None
         else:
janito/providers/google/model_info.py
CHANGED
@@ -1,40 +1,32 @@
 from janito.llm.model import LLMModelInfo

 MODEL_SPECS = {
-    (previous "gemini-2.5-…" entries using driver="GoogleGenaiModelDriver", partly truncated in the
-     source rendering, including "gemini-2.5-flash-preview-05-20": context=1000000, max_input=1000000,
-     max_cot="N/A", max_response=65536, thinking_supported=True, default_temp=0.2,
-     other={"preview": True, "flash": True})
+    "gemini-2.5-flash": LLMModelInfo(
+        name="gemini-2.5-flash",
+        other={"description": "Google Gemini 2.5 Flash (OpenAI-compatible endpoint)"},
+        open="google",
+        driver="OpenAIModelDriver",
+        max_response=8192,
+        max_cot=24576,
+        thinking_supported=True,
+    ),
+    "gemini-2.5-pro": LLMModelInfo(
+        name="gemini-2.5-pro",
+        other={"description": "Google Gemini 2.5 Pro (OpenAI-compatible endpoint)"},
+        open="google",
+        driver="OpenAIModelDriver",
+        max_response=65536,
+        max_cot=196608,
+        thinking_supported=True,
+    ),
+    "gemini-2.5-flash-lite-preview-06-17": LLMModelInfo(
+        name="gemini-2.5-flash-lite-preview-06-17",
+        other={"description": "Google Gemini 2.5 Flash-Lite Preview (OpenAI-compatible endpoint)"},
+        open="google",
+        driver="OpenAIModelDriver",
+        max_response=64000,
+        max_cot=192000,
+        thinking_supported=True,
+    ),
+    # Add more Gemini models as needed
 }
janito/providers/google/provider.py
CHANGED
@@ -2,64 +2,78 @@ from janito.llm.provider import LLMProvider
 from janito.llm.model import LLMModelInfo
 from janito.llm.auth import LLMAuthManager
 from janito.llm.driver_config import LLMDriverConfig
-from janito.drivers.
-from janito.tools
+from janito.drivers.openai.driver import OpenAIModelDriver
+from janito.tools import get_local_tools_adapter
 from janito.providers.registry import LLMProviderRegistry
+from queue import Queue

-    (several module-level lines removed; partly truncated in the source rendering)
-unavailable_reason = GoogleGenaiModelDriver.unavailable_reason
-maintainer = "Needs maintainer"
+# Import Google Gemini model specs (to be created or imported as needed)
+try:
+    from .model_info import MODEL_SPECS
+except ImportError:
+    MODEL_SPECS = {}

 class GoogleProvider(LLMProvider):
-    MODEL_SPECS = MODEL_SPECS
-    maintainer = "Needs maintainer"
-    """
-    Provider for Google LLMs via google-google.
-    Default model: 'gemini-2.5-pro-preview-05-06'.
-    """
     name = "google"
+    maintainer = "João Pinto <lamego.pinto@gmail.com>"
+    MODEL_SPECS = MODEL_SPECS
+    DEFAULT_MODEL = "gemini-2.5-flash"  # Default Gemini model

-    def __init__(
-    (previous constructor body removed; truncated in the source rendering)
+    def __init__(
+        self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
+    ):
+        # Always have a tools adapter available to avoid AttributeError downstream when
+        # the driver is missing but other logic still relies on tools execution.
+        self._tools_adapter = get_local_tools_adapter()
         if not self.available:
             self._driver = None
+        else:
+            self.auth_manager = auth_manager or LLMAuthManager()
+            self._api_key = self.auth_manager.get_credentials(type(self).name)
+            self._tools_adapter = get_local_tools_adapter()
+            self._driver_config = config or LLMDriverConfig(model=None)
+            # Only set default if model is not set by CLI/config
+            if not getattr(self._driver_config, 'model', None):
+                self._driver_config.model = self.DEFAULT_MODEL
+            if not self._driver_config.api_key:
+                self._driver_config.api_key = self._api_key
+            # Set the Gemini API endpoint for OpenAI compatibility
+            self._driver_config.base_url = "https://generativelanguage.googleapis.com/v1beta/openai/"
+            self.fill_missing_device_info(self._driver_config)
+            self._driver = None  # to be provided by factory/agent

     @property
-    def driver(self) ->
+    def driver(self) -> OpenAIModelDriver:
         if not self.available:
-            raise ImportError(f"
+            raise ImportError(f"GoogleOpenAIProvider unavailable: {self.unavailable_reason}")
         return self._driver

     @property
     def available(self):
-        return available
+        return OpenAIModelDriver.available

     @property
     def unavailable_reason(self):
-        return unavailable_reason
+        return OpenAIModelDriver.unavailable_reason
+
+    def create_driver(self):
+        """
+        Creates and returns a new OpenAIModelDriver instance configured for Gemini API.
+        """
+        driver = OpenAIModelDriver(
+            tools_adapter=self._tools_adapter, provider_name=self.name
+        )
+        driver.config = self._driver_config
+        return driver
+
+    @property
+    def model_name(self):
+        return self._driver_config.model
+
+    @property
+    def driver_config(self):
+        """Public, read-only access to the provider's LLMDriverConfig object."""
+        return self._driver_config

     def execute_tool(self, tool_name: str, event_bus, *args, **kwargs):
         self._tools_adapter.event_bus = event_bus
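In practice this means the Google provider now builds a standard OpenAIModelDriver pointed at Gemini's OpenAI-compatible endpoint instead of the removed google-genai driver. A sketch, assuming the OpenAI driver dependency is installed and a Google API key has already been stored via LLMAuthManager; names are taken from the diff above:

from janito.providers.google.provider import GoogleProvider

provider = GoogleProvider()
print(provider.model_name)              # -> gemini-2.5-flash (default)
print(provider.driver_config.base_url)  # -> https://generativelanguage.googleapis.com/v1beta/openai/
driver = provider.create_driver()       # OpenAIModelDriver configured for the Gemini API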
janito/providers/mistralai/provider.py
CHANGED
@@ -3,7 +3,7 @@ from janito.llm.model import LLMModelInfo
 from janito.llm.auth import LLMAuthManager
 from janito.llm.driver_config import LLMDriverConfig
 from janito.drivers.mistralai.driver import MistralAIModelDriver
-from janito.tools
+from janito.tools import get_local_tools_adapter
 from janito.providers.registry import LLMProviderRegistry

 from .model_info import MODEL_SPECS
@@ -24,12 +24,15 @@ class MistralAIProvider(LLMProvider):
     def __init__(
         self, config: LLMDriverConfig = None, auth_manager: LLMAuthManager = None
     ):
+        # Always instantiate a tools adapter so that provider.execute_tool() remains functional
+        # even when the driver cannot be constructed due to missing dependencies.
+        self._tools_adapter = get_local_tools_adapter()
         if not self.available:
             self._driver = None
             return
         self.auth_manager = auth_manager or LLMAuthManager()
         self._api_key = self.auth_manager.get_credentials(type(self).name)
-        self._tools_adapter =
+        self._tools_adapter = get_local_tools_adapter()
         self._info = config or LLMDriverConfig(model=None)
         if not self._info.model:
             self._info.model = self.DEFAULT_MODEL
janito/providers/openai/provider.py
CHANGED
@@ -24,6 +24,10 @@ class OpenAIProvider(LLMProvider):
         self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
     ):
         if not self.available:
+            # Even when the OpenAI driver is unavailable we still need a tools adapter
+            # so that any generic logic that expects `execute_tool()` to work does not
+            # crash with an AttributeError when it tries to access `self._tools_adapter`.
+            self._tools_adapter = get_local_tools_adapter()
             self._driver = None
         else:
             self.auth_manager = auth_manager or LLMAuthManager()
janito/providers/provider_static_info.py
CHANGED
@@ -1,14 +1,13 @@
 # Provider static metadata registry for listing purposes (name, maintainer, and future fields)
 STATIC_PROVIDER_METADATA = {
     "openai": {
+    },
+    "google": {
         "maintainer": "João Pinto <lamego.pinto@gmail.com>",
     },
     "azure_openai": {
         "maintainer": "João Pinto <lamego.pinto@gmail.com>",
     },
-    "google": {
-        "maintainer": "Needs maintainer",
-    },
     "mistralai": {
         "maintainer": "Needs maintainer",
     },