janito 2.8.0__py3-none-any.whl → 2.10.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38)
  1. janito/README.md +149 -0
  2. janito/cli/chat_mode/script_runner.py +2 -2
  3. janito/cli/chat_mode/session.py +24 -6
  4. janito/cli/chat_mode/shell/commands/_priv_status.py +13 -0
  5. janito/cli/cli_commands/list_config.py +31 -0
  6. janito/cli/cli_commands/list_profiles.py +79 -0
  7. janito/cli/cli_commands/show_config.py +4 -11
  8. janito/cli/core/getters.py +14 -1
  9. janito/cli/main_cli.py +52 -3
  10. janito/config.py +1 -0
  11. janito/config_manager.py +11 -2
  12. janito/docs/GETTING_STARTED.md +117 -0
  13. janito/docs/PROVIDERS.md +224 -0
  14. janito/drivers/azure_openai/driver.py +7 -0
  15. janito/drivers/openai/driver.py +10 -2
  16. janito/llm/provider.py +1 -1
  17. janito/mkdocs.yml +40 -0
  18. janito/provider_registry.py +14 -4
  19. janito/providers/__init__.py +1 -1
  20. janito/providers/alibaba/__init__.py +0 -0
  21. janito/providers/alibaba/model_info.py +33 -0
  22. janito/providers/alibaba/provider.py +102 -0
  23. janito/providers/anthropic/provider.py +6 -0
  24. janito/providers/azure_openai/provider.py +8 -2
  25. janito/providers/deepseek/provider.py +7 -1
  26. janito/providers/google/provider.py +7 -1
  27. janito/providers/moonshotai/model_info.py +11 -0
  28. janito/providers/moonshotai/provider.py +8 -2
  29. janito/providers/openai/provider.py +7 -1
  30. {janito-2.8.0.dist-info → janito-2.10.0.dist-info}/METADATA +8 -8
  31. {janito-2.8.0.dist-info → janito-2.10.0.dist-info}/RECORD +35 -28
  32. janito/providers/groq/__init__.py +0 -1
  33. janito/providers/groq/model_info.py +0 -46
  34. janito/providers/groq/provider.py +0 -76
  35. {janito-2.8.0.dist-info → janito-2.10.0.dist-info}/WHEEL +0 -0
  36. {janito-2.8.0.dist-info → janito-2.10.0.dist-info}/entry_points.txt +0 -0
  37. {janito-2.8.0.dist-info → janito-2.10.0.dist-info}/licenses/LICENSE +0 -0
  38. {janito-2.8.0.dist-info → janito-2.10.0.dist-info}/top_level.txt +0 -0
janito/docs/PROVIDERS.md ADDED
@@ -0,0 +1,224 @@
+ # Provider Configuration Guide
+
+ This guide covers how to configure and use different LLM providers with Janito.
+
+ ## MoonshotAI (Recommended)
+
+ **MoonshotAI** is the recommended default provider for Janito, offering excellent performance and competitive pricing.
+
+ ### Setup
+ ```bash
+ # Set API key
+ janito --set-api-key YOUR_API_KEY -p moonshotai
+
+ # Set as default provider
+ janito --set provider=moonshotai
+ janito --set model=kimi-k1-8k
+ ```
+
+ ### Available Models
+
+ - **kimi-k1-8k**: Fast, general-purpose model (8k context)
+ - **kimi-k1-32k**: Extended context model (32k context)
+ - **kimi-k1-128k**: Long context model (128k context)
+ - **kimi-k2-turbo-preview**: Latest enhanced model
+
+ ### Environment Variables
+ ```bash
+ export MOONSHOTAI_API_KEY=your_key_here
+ ```
+
+ ## OpenAI
+
+ ### Setup
+ ```bash
+ # Set API key
+ janito --set-api-key YOUR_API_KEY -p openai
+
+ # Use specific model
+ janito -p openai -m gpt-4 "Your prompt"
+ ```
+
+ ### Available Models
+
+ - **gpt-4**: Most capable model
+ - **gpt-4-turbo**: Faster, more efficient
+ - **gpt-3.5-turbo**: Cost-effective option
+
+ ### Environment Variables
+ ```bash
+ export OPENAI_API_KEY=your_key_here
+ ```
+
+ ## Anthropic
+
+ ### Setup
+ ```bash
+ # Set API key
+ janito --set-api-key YOUR_API_KEY -p anthropic
+
+ # Use Claude models
+ janito -p anthropic -m claude-3-5-sonnet-20241022 "Your prompt"
+ ```
+
+ ### Available Models
+
+ - **claude-3-5-sonnet-20241022**: Most capable
+ - **claude-3-opus-20240229**: High performance
+ - **claude-3-haiku-20240307**: Fast and cost-effective
+
+ ### Environment Variables
+ ```bash
+ export ANTHROPIC_API_KEY=your_key_here
+ ```
+
+ ## Google
+
+ ### Setup
+ ```bash
+ # Set API key
+ janito --set-api-key YOUR_API_KEY -p google
+
+ # Use Gemini models
+ janito -p google -m gemini-2.0-flash-exp "Your prompt"
+ ```
+
+ ### Available Models
+
+ - **gemini-2.0-flash-exp**: Latest experimental model
+ - **gemini-1.5-pro**: Production-ready
+ - **gemini-1.5-flash**: Fast and efficient
+
+ ### Environment Variables
+ ```bash
+ export GOOGLE_API_KEY=your_key_here
+ ```
+
+ ## Azure OpenAI
+
+ ### Setup
+ ```bash
+ # Set configuration
+ janito --set-api-key YOUR_API_KEY -p azure-openai
+ janito --set azure_deployment_name=your_deployment_name -p azure-openai
+ ```
+
+ ### Configuration
+
+ Requires both API key and deployment name:
+
+ - **API Key**: Your Azure OpenAI key
+ - **Deployment Name**: Your Azure deployment name
+ - **Base URL**: Your Azure endpoint URL
+
+ ### Environment Variables
+ ```bash
+ export AZURE_OPENAI_API_KEY=your_key_here
+ export AZURE_OPENAI_ENDPOINT=https://your-resource.openai.azure.com/
+ ```
+
+ ## Other Providers
+
+ Janito also supports these providers through OpenAI-compatible APIs:
+
+ ### Alibaba Cloud
+ ```bash
+ janito --set-api-key YOUR_KEY -p alibaba
+ ```
+
+ ### DeepSeek
+ ```bash
+ janito --set-api-key YOUR_KEY -p deepseek
+ ```
+
+ ### Groq
+ ```bash
+ janito --set-api-key YOUR_KEY -p groq
+ ```
+
+ ### Mistral
+ ```bash
+ janito --set-api-key YOUR_KEY -p mistral
+ ```
+
+ ## Configuration Management
+
+ ### Check Current Configuration
+ ```bash
+ janito --show-config
+ ```
+
+ ### List All Providers
+ ```bash
+ janito --list-providers
+ ```
+
+ ### List Models for a Provider
+ ```bash
+ janito -p moonshotai --list-models
+ janito -p openai --list-models
+ ```
+
+ ### Switch Providers
+ ```bash
+ # Temporarily for one command
+ janito -p openai -m gpt-4 "Your prompt"
+
+ # Permanently as default
+ janito --set provider=openai
+ janito --set model=gpt-4
+ ```
+
+ ## Advanced Configuration
+
+ ### Custom Base URLs
+ For OpenAI-compatible providers, you can set custom base URLs:
+
+ ```bash
+ janito --set base_url=https://your-custom-endpoint.com -p openai
+ ```
+
+ ### Provider-Specific Settings
+ Each provider can have custom settings:
+
+ ```bash
+ # Set temperature for a specific provider/model
+ janito --set temperature=0.7 -p moonshotai -m kimi-k1-8k
+
+ # Set max tokens
+ janito --set max_tokens=2000 -p openai -m gpt-4
+ ```
+
+ ## Troubleshooting
+
+ ### Provider Not Found
+ ```bash
+ # Check if provider is registered
+ janito --list-providers
+
+ # Re-register provider
+ janito --set-api-key YOUR_KEY -p PROVIDER_NAME
+ ```
+
+ ### API Key Issues
+ ```bash
+ # Check current API key
+ janito --show-config
+
+ # Reset API key
+ janito --set-api-key NEW_KEY -p PROVIDER_NAME
+ ```
+
+ ### Model Not Available
+ ```bash
+ # List available models for provider
+ janito -p PROVIDER_NAME --list-models
+ ```
+
+ ## Best Practices
+
+ 1. **Start with MoonshotAI**: It's the recommended default for good reason
+ 2. **Use environment variables**: For CI/CD and containerized environments
+ 3. **Test different models**: Each has different strengths and pricing
+ 4. **Monitor usage**: Keep track of API costs and rate limits
+ 5. **Use profiles**: Set up different configurations for different use cases
janito/drivers/azure_openai/driver.py CHANGED
@@ -63,6 +63,13 @@ class AzureOpenAIModelDriver(OpenAIModelDriver):

      def _instantiate_openai_client(self, config):
          try:
+             if not config.api_key:
+                 provider_name = getattr(self, 'provider_name', 'Azure OpenAI')
+                 print(f"[ERROR] No API key found for provider '{provider_name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p azure-openai")
+                 print(f"Or set the AZURE_OPENAI_API_KEY environment variable.")
+                 raise ValueError(f"API key is required for provider '{provider_name}'")
+
              from openai import AzureOpenAI

              api_key_display = str(config.api_key)
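For context on what the new guard protects: the driver ultimately constructs the `AzureOpenAI` client from the `openai` SDK. A minimal standalone sketch of that client, with placeholder values rather than janito's own config plumbing:

```python
# Standalone illustration of the AzureOpenAI client the driver wraps
# (placeholder values; janito fills these from its LLMDriverConfig).
from openai import AzureOpenAI

client = AzureOpenAI(
    api_key="YOUR_API_KEY",                                    # what the check above validates
    api_version="2024-02-01",                                  # assumed API version, not taken from this diff
    azure_endpoint="https://your-resource.openai.azure.com/",  # matches AZURE_OPENAI_ENDPOINT in the docs
)

response = client.chat.completions.create(
    model="your_deployment_name",  # Azure uses the deployment name in place of a model id
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)
```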
janito/drivers/openai/driver.py CHANGED
@@ -74,8 +74,9 @@ class OpenAIModelDriver(LLMDriver):
              api_kwargs[p] = v
          api_kwargs["messages"] = conversation
          api_kwargs["stream"] = False
-         if self.tools_adapter and self.tools_adapter.get_tool_classes():
-             api_kwargs["parallel_tool_calls"] = True
+         # Always return the prepared kwargs, even if no tools are registered. The
+         # OpenAI Python SDK expects a **mapping** – passing *None* will raise
+         # ``TypeError: argument after ** must be a mapping, not NoneType``.
          return api_kwargs

      def _call_api(self, driver_input: DriverInput):
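The new comment describes plain Python semantics rather than anything janito-specific; a minimal sketch of the failure mode it guards against:

```python
# Why the helper must always return a dict, never None:
# unpacking None with ** raises TypeError at the call site.
def call(**kwargs):
    return kwargs

kwargs = None
try:
    call(**kwargs)
except TypeError as exc:
    # e.g. "argument after ** must be a mapping, not NoneType" (wording varies by Python version)
    print(exc)

print(call(**{}))  # an empty dict is fine: prints {}
```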
@@ -245,6 +246,13 @@ class OpenAIModelDriver(LLMDriver):

      def _instantiate_openai_client(self, config):
          try:
+             if not config.api_key:
+                 provider_name = getattr(self, 'provider_name', 'OpenAI-compatible')
+                 print(f"[ERROR] No API key found for provider '{provider_name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p {provider_name.lower()}")
+                 print(f"Or set the {provider_name.upper()}_API_KEY environment variable.")
+                 raise ValueError(f"API key is required for provider '{provider_name}'")
+
              api_key_display = str(config.api_key)
              if api_key_display and len(api_key_display) > 8:
                  api_key_display = api_key_display[:4] + "..." + api_key_display[-4:]
janito/llm/provider.py CHANGED
@@ -17,7 +17,7 @@ class LLMProvider(ABC):
      Abstract base class for Large Language Model (LLM) providers.

      Provider Usage and Driver Communication Flow:
-     1. Provider class is selected (e.g., OpenAIProvider, MistralProvider).
+     1. Provider class is selected (e.g., OpenAIProvider).
      2. An instance of the provider is created. This instance is bound to a specific configuration (LLMDriverConfig) containing model, credentials, etc.
      3. All drivers created by that provider instance are associated with the bound config.
      4. To communicate with an LLM, call create_driver() on the provider instance, which yields a driver configured for the attached config. Every driver created via this method inherits the provider's configuration.
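A hedged sketch of steps 1 through 4, composed only from names that appear elsewhere in this diff (the Alibaba provider added later in this file set follows this pattern); the actual queue and agent-factory wiring is handled by janito internals not shown here:

```python
# Sketch of the documented provider -> driver flow, using names from this diff.
from janito.llm.driver_config import LLMDriverConfig
from janito.providers.alibaba.provider import AlibabaProvider

config = LLMDriverConfig(model="qwen-turbo")  # step 2: provider instance bound to a config
provider = AlibabaProvider(config=config)     # step 1-2: select and instantiate the provider
driver = provider.create_driver()             # step 4: driver inherits the provider's configuration
# driver.start()  # per the provider's own NOTE, only if background processing is needed
```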
janito/mkdocs.yml ADDED
@@ -0,0 +1,40 @@
+ site_name: Janito CLI Documentation
+ site_description: A powerful command-line tool for running LLM-powered workflows
+ site_url: https://janito.readthedocs.io/
+
+ nav:
+   - Home: README.md
+   - Getting Started: docs/GETTING_STARTED.md
+   - Providers: docs/PROVIDERS.md
+   - Tools: docs/TOOLS_REFERENCE.md
+
+ markdown_extensions:
+   - admonition
+   - codehilite
+   - toc:
+       permalink: true
+   - pymdownx.superfences
+   - pymdownx.tabbed:
+       alternate_style: true
+
+ theme:
+   name: material
+   palette:
+     - scheme: default
+       primary: blue
+       accent: blue
+   features:
+     - navigation.tabs
+     - navigation.sections
+     - navigation.expand
+     - search.highlight
+     - search.share
+
+ repo_url: https://github.com/ikignosis/janito
+ repo_name: ikignosis/janito
+ edit_uri: edit/main/docs/
+
+ extra:
+   social:
+     - icon: fontawesome/brands/github
+       link: https://github.com/ikignosis/janito
janito/provider_registry.py CHANGED
@@ -93,11 +93,21 @@ class ProviderRegistry:
                  model_specs = model_info_mod.MODEL_SPECS
              elif hasattr(model_info_mod, "MOONSHOTAI_MODEL_SPECS"):
                  model_specs = model_info_mod.MOONSHOTAI_MODEL_SPECS
-
-             if provider_name == "groq":
-                 return "<any> (must be provided)"
+
              if model_specs:
-                 return ", ".join(model_specs.keys())
+                 default_model = getattr(provider_class, "DEFAULT_MODEL", None)
+                 model_names = []
+
+                 for model_key in model_specs.keys():
+                     if model_key == default_model:
+                         # Highlight the default model with color and star icon
+                         model_names.append(f"[bold green]⭐ {model_key}[/bold green]")
+                     else:
+                         model_names.append(model_key)
+
+                 if provider_name == "moonshotai":
+                     return ", ".join(model_names)
+                 return ", ".join(model_names)
              return "-"
          except Exception as e:
              return "-"
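The `[bold green]⭐ …[/bold green]` string uses console markup in the style of the Rich library, which the registry's table output appears to rely on. A minimal standalone sketch of how such a marked-up cell renders, assuming the `rich` package is installed:

```python
# Standalone demo of the markup used above; assumes the `rich` package.
from rich.console import Console

console = Console()
default_model = "kimi-k2-turbo-preview"
models = ["kimi-k2-0711-preview", default_model]

cells = [
    f"[bold green]⭐ {m}[/bold green]" if m == default_model else m
    for m in models
]
console.print(", ".join(cells))  # the default model renders bold green with a star
```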
janito/providers/__init__.py CHANGED
@@ -5,4 +5,4 @@ import janito.providers.azure_openai.provider
  import janito.providers.anthropic.provider
  import janito.providers.deepseek.provider
  import janito.providers.moonshotai.provider
- import janito.providers.groq.provider
+ import janito.providers.alibaba.provider
janito/providers/alibaba/__init__.py ADDED
File without changes
janito/providers/alibaba/model_info.py ADDED
@@ -0,0 +1,33 @@
+ from janito.llm.model import LLMModelInfo
+
+ MODEL_SPECS = {
+     "qwen-turbo": LLMModelInfo(
+         name="qwen-turbo",
+         context=1008192,
+         max_response=8192,
+         category="Alibaba Qwen Turbo Model (OpenAI-compatible)",
+         driver="OpenAIModelDriver",
+     ),
+     "qwen-plus": LLMModelInfo(
+         name="qwen-plus",
+         context=131072,
+         max_response=8192,
+         category="Alibaba Qwen Plus Model (OpenAI-compatible)",
+         driver="OpenAIModelDriver",
+     ),
+     "qwen-max": LLMModelInfo(
+         name="qwen-max",
+         context=32768,
+         max_response=8192,
+         category="Alibaba Qwen Max Model (OpenAI-compatible)",
+         driver="OpenAIModelDriver",
+     ),
+
+     "qwen3-coder-plus": LLMModelInfo(
+         name="qwen3-coder-plus",
+         context=1048576,
+         max_response=65536,
+         category="Alibaba Qwen3 Coder Plus Model (OpenAI-compatible)",
+         driver="OpenAIModelDriver",
+     ),
+ }
janito/providers/alibaba/provider.py ADDED
@@ -0,0 +1,102 @@
+ from janito.llm.provider import LLMProvider
+ from janito.llm.model import LLMModelInfo
+ from janito.llm.auth import LLMAuthManager
+ from janito.llm.driver_config import LLMDriverConfig
+ from janito.drivers.openai.driver import OpenAIModelDriver
+ from janito.tools import get_local_tools_adapter
+ from janito.providers.registry import LLMProviderRegistry
+ from .model_info import MODEL_SPECS
+ from queue import Queue
+
+ available = OpenAIModelDriver.available
+ unavailable_reason = OpenAIModelDriver.unavailable_reason
+
+
+ class AlibabaProvider(LLMProvider):
+     name = "alibaba"
+     NAME = "alibaba"
+     MAINTAINER = "João Pinto <janito@ikignosis.org>"
+     MODEL_SPECS = MODEL_SPECS
+     DEFAULT_MODEL = "qwen-turbo" # Options: qwen-turbo, qwen-plus, qwen-max
+
+     def __init__(
+         self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
+     ):
+         # Always set a tools adapter so that even if the driver is unavailable,
+         # generic code paths that expect provider.execute_tool() continue to work.
+         self._tools_adapter = get_local_tools_adapter()
+
+         # Always initialize _driver_config to avoid AttributeError
+         self._driver_config = config or LLMDriverConfig(model=None)
+
+         if not self.available:
+             self._driver = None
+         else:
+             self.auth_manager = auth_manager or LLMAuthManager()
+             self._api_key = self.auth_manager.get_credentials(type(self).NAME)
+             if not self._api_key:
+                 print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+                 print(f"Or set the ALIBABA_API_KEY environment variable.")
+
+             if not self._driver_config.model:
+                 self._driver_config.model = self.DEFAULT_MODEL
+             if not self._driver_config.api_key:
+                 self._driver_config.api_key = self._api_key
+             # Set Alibaba international endpoint as default base_url if not provided
+             if not getattr(self._driver_config, "base_url", None):
+                 self._driver_config.base_url = "https://dashscope-intl.aliyuncs.com/compatible-mode/v1"
+             self.fill_missing_device_info(self._driver_config)
+             self._driver = None # to be provided by factory/agent
+
+     @property
+     def driver(self) -> OpenAIModelDriver:
+         if not self.available:
+             raise ImportError(f"AlibabaProvider unavailable: {self.unavailable_reason}")
+         return self._driver
+
+     @property
+     def available(self):
+         return available
+
+     @property
+     def unavailable_reason(self):
+         return unavailable_reason
+
+     def create_driver(self):
+         """
+         Creates and returns a new OpenAIModelDriver instance with input/output queues.
+         """
+         driver = OpenAIModelDriver(
+             tools_adapter=self._tools_adapter, provider_name=self.name
+         )
+         driver.config = self.driver_config
+         # NOTE: The caller is responsible for calling driver.start() if background processing is needed.
+         return driver
+
+     def create_agent(self, tools_adapter=None, agent_name: str = None, **kwargs):
+         from janito.llm.agent import LLMAgent
+
+         # Always create a new driver with the passed-in tools_adapter
+         if tools_adapter is None:
+             tools_adapter = get_local_tools_adapter()
+         # Should use new-style driver construction via queues/factory (handled elsewhere)
+         raise NotImplementedError(
+             "create_agent must be constructed via new factory using input/output queues and config."
+         )
+
+     @property
+     def model_name(self):
+         return self._driver_config.model
+
+     @property
+     def driver_config(self):
+         """Public, read-only access to the provider's LLMDriverConfig object."""
+         return self._driver_config
+
+     def execute_tool(self, tool_name: str, event_bus, *args, **kwargs):
+         self._tools_adapter.event_bus = event_bus
+         return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)
+
+
+ LLMProviderRegistry.register(AlibabaProvider.NAME, AlibabaProvider)
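Since the new provider routes through `OpenAIModelDriver` with the DashScope international endpoint as its default `base_url`, the wire-level behaviour is the plain OpenAI-compatible API. A standalone sketch of the equivalent direct call using the `openai` SDK (placeholder key; not janito code):

```python
# Direct OpenAI-compatible call against the endpoint the provider defaults to.
from openai import OpenAI

client = OpenAI(
    api_key="YOUR_ALIBABA_API_KEY",  # janito reads this via LLMAuthManager / ALIBABA_API_KEY
    base_url="https://dashscope-intl.aliyuncs.com/compatible-mode/v1",
)

response = client.chat.completions.create(
    model="qwen-turbo",  # the provider's DEFAULT_MODEL
    messages=[{"role": "user", "content": "Say hello"}],
)
print(response.choices[0].message.content)
```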
janito/providers/anthropic/provider.py CHANGED
@@ -21,6 +21,12 @@ class AnthropicProvider(LLMProvider):
          self._tools_adapter = get_local_tools_adapter()
          self.auth_manager = auth_manager or LLMAuthManager()
          self._api_key = self.auth_manager.get_credentials(type(self).NAME)
+         if not self._api_key:
+             print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+             print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+             print(f"Or set the ANTHROPIC_API_KEY environment variable.")
+             return
+
          self._tools_adapter = get_local_tools_adapter()
          self._driver_config = config or LLMDriverConfig(model=None)
          if not getattr(self._driver_config, "model", None):
janito/providers/azure_openai/provider.py CHANGED
@@ -11,13 +11,13 @@ from janito.drivers.azure_openai.driver import AzureOpenAIModelDriver

  available = AzureOpenAIModelDriver.available
  unavailable_reason = AzureOpenAIModelDriver.unavailable_reason
- maintainer = "João Pinto <lamego.pinto@gmail.com>"
+ maintainer = "João Pinto <janito@ikignosis.org>"


  class AzureOpenAIProvider(LLMProvider):
      name = "azure_openai"
      NAME = "azure_openai"
-     MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
+     MAINTAINER = "João Pinto <janito@ikignosis.org>"
      MODEL_SPECS = MODEL_SPECS
      DEFAULT_MODEL = "azure_openai_deployment"

@@ -32,6 +32,12 @@ class AzureOpenAIProvider(LLMProvider):
              return
          self._auth_manager = auth_manager or LLMAuthManager()
          self._api_key = self._auth_manager.get_credentials(type(self).NAME)
+         if not self._api_key:
+             print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+             print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+             print(f"Or set the AZURE_OPENAI_API_KEY environment variable.")
+             return
+
          self._tools_adapter = get_local_tools_adapter()
          self._driver_config = config or LLMDriverConfig(model=None)
          if not self._driver_config.model:
janito/providers/deepseek/provider.py CHANGED
@@ -15,7 +15,7 @@ unavailable_reason = OpenAIModelDriver.unavailable_reason
  class DeepSeekProvider(LLMProvider):
      name = "deepseek"
      NAME = "deepseek"
-     MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
+     MAINTAINER = "João Pinto <janito@ikignosis.org>"
      MODEL_SPECS = MODEL_SPECS
      DEFAULT_MODEL = "deepseek-chat" # Options: deepseek-chat, deepseek-reasoner

@@ -30,6 +30,12 @@ class DeepSeekProvider(LLMProvider):
          else:
              self.auth_manager = auth_manager or LLMAuthManager()
              self._api_key = self.auth_manager.get_credentials(type(self).NAME)
+             if not self._api_key:
+                 print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+                 print(f"Or set the DEEPSEEK_API_KEY environment variable.")
+                 return
+
              self._tools_adapter = get_local_tools_adapter()
              self._driver_config = config or LLMDriverConfig(model=None)
              if not self._driver_config.model:
janito/providers/google/provider.py CHANGED
@@ -17,7 +17,7 @@ except ImportError:
  class GoogleProvider(LLMProvider):
      name = "google"
      NAME = "google"
-     MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
+     MAINTAINER = "João Pinto <janito@ikignosis.org>"
      MODEL_SPECS = MODEL_SPECS
      DEFAULT_MODEL = "gemini-2.5-flash" # Default Gemini model

@@ -32,6 +32,12 @@ class GoogleProvider(LLMProvider):
          else:
              self.auth_manager = auth_manager or LLMAuthManager()
              self._api_key = self.auth_manager.get_credentials(type(self).name)
+             if not self._api_key:
+                 print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+                 print(f"Or set the GOOGLE_API_KEY environment variable.")
+                 return
+
              self._tools_adapter = get_local_tools_adapter()
              self._driver_config = config or LLMDriverConfig(model=None)
              # Only set default if model is not set by CLI/config
janito/providers/moonshotai/model_info.py CHANGED
@@ -12,4 +12,15 @@ MOONSHOTAI_MODEL_SPECS = {
          open="moonshotai",
          driver="OpenAIModelDriver",
      ),
+     "kimi-k2-turbo-preview": LLMModelInfo(
+         name="kimi-k2-turbo-preview",
+         context=128000,
+         max_input=100000,
+         max_cot="N/A",
+         max_response=4096,
+         thinking_supported=False,
+         default_temp=0.2,
+         open="moonshotai",
+         driver="OpenAIModelDriver",
+     ),
  }
janito/providers/moonshotai/provider.py CHANGED
@@ -10,9 +10,9 @@ from .model_info import MOONSHOTAI_MODEL_SPECS
  class MoonshotAIProvider(LLMProvider):
      name = "moonshotai"
      NAME = "moonshotai"
-     MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
+     MAINTAINER = "João Pinto <janito@ikignosis.org>"
      MODEL_SPECS = MOONSHOTAI_MODEL_SPECS
-     DEFAULT_MODEL = "kimi-k2-0711-preview"
+     DEFAULT_MODEL = "kimi-k2-turbo-preview"

      def __init__(
          self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
@@ -23,6 +23,12 @@ class MoonshotAIProvider(LLMProvider):
          else:
              self.auth_manager = auth_manager or LLMAuthManager()
              self._api_key = self.auth_manager.get_credentials(type(self).name)
+             if not self._api_key:
+                 print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+                 print(f"Or set the MOONSHOTAI_API_KEY environment variable.")
+                 return
+
              self._tools_adapter = get_local_tools_adapter()
              self._driver_config = config or LLMDriverConfig(model=None)
              if not self._driver_config.model:
janito/providers/openai/provider.py CHANGED
@@ -15,7 +15,7 @@ unavailable_reason = OpenAIModelDriver.unavailable_reason
  class OpenAIProvider(LLMProvider):
      name = "openai"
      NAME = "openai"
-     MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
+     MAINTAINER = "João Pinto <janito@ikignosis.org>"
      MODEL_SPECS = MODEL_SPECS
      DEFAULT_MODEL = "gpt-4.1" # Options: gpt-4.1, gpt-4o, o3-mini, o4-mini,

@@ -31,6 +31,12 @@ class OpenAIProvider(LLMProvider):
          else:
              self.auth_manager = auth_manager or LLMAuthManager()
              self._api_key = self.auth_manager.get_credentials(type(self).NAME)
+             if not self._api_key:
+                 print(f"[ERROR] No API key found for provider '{self.name}'. Please set the API key using:")
+                 print(f" janito --set-api-key YOUR_API_KEY -p {self.name}")
+                 print(f"Or set the OPENAI_API_KEY environment variable.")
+                 return
+
              self._tools_adapter = get_local_tools_adapter()
              self._driver_config = config or LLMDriverConfig(model=None)
              if not self._driver_config.model: