janito 2.7.0__py3-none-any.whl → 2.8.0__py3-none-any.whl

This diff compares the contents of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (116)
  1. janito/__init__.py +0 -1
  2. janito/__main__.py +0 -1
  3. janito/_version.py +0 -3
  4. janito/agent/setup_agent.py +77 -10
  5. janito/agent/templates/profiles/{system_prompt_template_plain_software_developer.txt.j2 → system_prompt_template_Developer_with_Python_Tools.txt.j2} +5 -1
  6. janito/agent/templates/profiles/system_prompt_template_developer.txt.j2 +3 -12
  7. janito/cli/__init__.py +0 -1
  8. janito/cli/chat_mode/bindings.py +1 -1
  9. janito/cli/chat_mode/chat_entry.py +0 -2
  10. janito/cli/chat_mode/prompt_style.py +0 -3
  11. janito/cli/chat_mode/script_runner.py +9 -5
  12. janito/cli/chat_mode/session.py +80 -35
  13. janito/cli/chat_mode/session_profile_select.py +61 -52
  14. janito/cli/chat_mode/shell/commands/__init__.py +1 -5
  15. janito/cli/chat_mode/shell/commands/_priv_check.py +1 -0
  16. janito/cli/chat_mode/shell/commands/bang.py +10 -3
  17. janito/cli/chat_mode/shell/commands/conversation_restart.py +24 -7
  18. janito/cli/chat_mode/shell/commands/execute.py +22 -7
  19. janito/cli/chat_mode/shell/commands/help.py +4 -1
  20. janito/cli/chat_mode/shell/commands/model.py +13 -5
  21. janito/cli/chat_mode/shell/commands/privileges.py +21 -0
  22. janito/cli/chat_mode/shell/commands/prompt.py +0 -2
  23. janito/cli/chat_mode/shell/commands/read.py +22 -5
  24. janito/cli/chat_mode/shell/commands/tools.py +15 -4
  25. janito/cli/chat_mode/shell/commands/write.py +22 -5
  26. janito/cli/chat_mode/shell/input_history.py +3 -1
  27. janito/cli/chat_mode/shell/session/manager.py +0 -2
  28. janito/cli/chat_mode/toolbar.py +25 -19
  29. janito/cli/cli_commands/list_models.py +1 -1
  30. janito/cli/cli_commands/list_providers.py +1 -0
  31. janito/cli/cli_commands/list_tools.py +35 -7
  32. janito/cli/cli_commands/model_utils.py +5 -3
  33. janito/cli/cli_commands/show_config.py +12 -0
  34. janito/cli/cli_commands/show_system_prompt.py +23 -9
  35. janito/cli/config.py +0 -13
  36. janito/cli/core/getters.py +2 -0
  37. janito/cli/core/runner.py +25 -8
  38. janito/cli/core/setters.py +13 -76
  39. janito/cli/main_cli.py +9 -25
  40. janito/cli/prompt_core.py +19 -18
  41. janito/cli/prompt_setup.py +6 -3
  42. janito/cli/rich_terminal_reporter.py +19 -5
  43. janito/cli/single_shot_mode/handler.py +14 -5
  44. janito/cli/verbose_output.py +5 -1
  45. janito/config_manager.py +4 -0
  46. janito/drivers/azure_openai/driver.py +27 -30
  47. janito/drivers/openai/driver.py +52 -36
  48. janito/formatting_token.py +12 -4
  49. janito/llm/agent.py +15 -6
  50. janito/llm/driver.py +1 -0
  51. janito/provider_registry.py +31 -70
  52. janito/providers/__init__.py +1 -0
  53. janito/providers/anthropic/model_info.py +0 -1
  54. janito/providers/anthropic/provider.py +9 -14
  55. janito/providers/azure_openai/provider.py +9 -4
  56. janito/providers/deepseek/provider.py +5 -4
  57. janito/providers/google/model_info.py +4 -2
  58. janito/providers/google/provider.py +11 -5
  59. janito/providers/groq/__init__.py +1 -0
  60. janito/providers/groq/model_info.py +46 -0
  61. janito/providers/groq/provider.py +76 -0
  62. janito/providers/moonshotai/provider.py +11 -4
  63. janito/providers/openai/model_info.py +0 -1
  64. janito/providers/openai/provider.py +6 -7
  65. janito/tools/__init__.py +2 -0
  66. janito/tools/adapters/local/__init__.py +2 -1
  67. janito/tools/adapters/local/adapter.py +21 -4
  68. janito/tools/adapters/local/ask_user.py +1 -0
  69. janito/tools/adapters/local/copy_file.py +1 -0
  70. janito/tools/adapters/local/create_directory.py +1 -0
  71. janito/tools/adapters/local/create_file.py +1 -0
  72. janito/tools/adapters/local/delete_text_in_file.py +2 -1
  73. janito/tools/adapters/local/fetch_url.py +1 -0
  74. janito/tools/adapters/local/find_files.py +7 -6
  75. janito/tools/adapters/local/get_file_outline/core.py +1 -0
  76. janito/tools/adapters/local/get_file_outline/java_outline.py +22 -15
  77. janito/tools/adapters/local/get_file_outline/search_outline.py +1 -0
  78. janito/tools/adapters/local/move_file.py +1 -0
  79. janito/tools/adapters/local/open_html_in_browser.py +15 -5
  80. janito/tools/adapters/local/open_url.py +1 -0
  81. janito/tools/adapters/local/python_code_run.py +1 -0
  82. janito/tools/adapters/local/python_command_run.py +1 -0
  83. janito/tools/adapters/local/python_file_run.py +1 -0
  84. janito/tools/adapters/local/read_files.py +19 -4
  85. janito/tools/adapters/local/remove_directory.py +1 -0
  86. janito/tools/adapters/local/remove_file.py +1 -0
  87. janito/tools/adapters/local/replace_text_in_file.py +4 -3
  88. janito/tools/adapters/local/run_bash_command.py +1 -0
  89. janito/tools/adapters/local/run_powershell_command.py +1 -0
  90. janito/tools/adapters/local/search_text/core.py +18 -17
  91. janito/tools/adapters/local/search_text/match_lines.py +5 -5
  92. janito/tools/adapters/local/search_text/pattern_utils.py +1 -1
  93. janito/tools/adapters/local/search_text/traverse_directory.py +7 -7
  94. janito/tools/adapters/local/validate_file_syntax/core.py +1 -1
  95. janito/tools/adapters/local/validate_file_syntax/html_validator.py +8 -1
  96. janito/tools/disabled_tools.py +68 -0
  97. janito/tools/path_security.py +18 -11
  98. janito/tools/permissions.py +6 -0
  99. janito/tools/permissions_parse.py +4 -3
  100. janito/tools/tool_base.py +11 -5
  101. janito/tools/tool_use_tracker.py +1 -4
  102. janito/tools/tool_utils.py +1 -1
  103. janito/tools/tools_adapter.py +57 -25
  104. {janito-2.7.0.dist-info → janito-2.8.0.dist-info}/METADATA +4 -12
  105. janito-2.8.0.dist-info/RECORD +202 -0
  106. janito/cli/chat_mode/shell/commands/livelogs.py +0 -49
  107. janito/drivers/mistralai/driver.py +0 -41
  108. janito/providers/mistralai/model_info.py +0 -37
  109. janito/providers/mistralai/provider.py +0 -72
  110. janito/providers/provider_static_info.py +0 -21
  111. janito-2.7.0.dist-info/RECORD +0 -202
  112. /janito/agent/templates/profiles/{system_prompt_template_assistant.txt.j2 → system_prompt_template_model_conversation_without_tools_or_context.txt.j2} +0 -0
  113. {janito-2.7.0.dist-info → janito-2.8.0.dist-info}/WHEEL +0 -0
  114. {janito-2.7.0.dist-info → janito-2.8.0.dist-info}/entry_points.txt +0 -0
  115. {janito-2.7.0.dist-info → janito-2.8.0.dist-info}/licenses/LICENSE +0 -0
  116. {janito-2.7.0.dist-info → janito-2.8.0.dist-info}/top_level.txt +0 -0
janito/formatting_token.py CHANGED
@@ -25,7 +25,9 @@ def format_tokens(n, tag=None, use_rich=False):
  return val


- def format_token_message_summary(msg_count, usage, width=96, use_rich=False, elapsed=None):
+ def format_token_message_summary(
+ msg_count, usage, width=96, use_rich=False, elapsed=None
+ ):
  """
  Returns a string (rich or pt markup) summarizing message count, last token usage, and elapsed time.
  """
@@ -40,16 +42,22 @@ def format_token_message_summary(msg_count, usage, width=96, use_rich=False, ela
  f"Completion: {format_tokens(completion_tokens, 'tokens_out', use_rich)}, "
  f"Total: {format_tokens(total_tokens, 'tokens_total', use_rich)}"
  )
- elapsed_part = f" | Elapsed: [cyan]{elapsed:.2f}s[/cyan]" if elapsed is not None else ""
+ elapsed_part = (
+ f" | Elapsed: [cyan]{elapsed:.2f}s[/cyan]" if elapsed is not None else ""
+ )
  return f"{left}{tokens_part}{elapsed_part}"


- def print_token_message_summary(console, msg_count=None, usage=None, width=96, elapsed=None):
+ def print_token_message_summary(
+ console, msg_count=None, usage=None, width=96, elapsed=None
+ ):
  """Prints the summary using rich markup, using defaults from perf_singleton if not given. Optionally includes elapsed time."""
  if usage is None:
  usage = performance_collector.get_last_request_usage()
  if msg_count is None:
  msg_count = performance_collector.get_total_turns() or 0
- line = format_token_message_summary(msg_count, usage, width, use_rich=True, elapsed=elapsed)
+ line = format_token_message_summary(
+ msg_count, usage, width, use_rich=True, elapsed=elapsed
+ )
  if line.strip():
  console.print(Rule(line))
janito/llm/agent.py CHANGED
@@ -97,6 +97,7 @@ class LLMAgent:
  # Refresh allowed_permissions in context before rendering
  from janito.tools.permissions import get_global_allowed_permissions
  from janito.tools.tool_base import ToolPermissions
+
  perms = get_global_allowed_permissions()
  if isinstance(perms, ToolPermissions):
  perm_str = ""
@@ -171,7 +172,7 @@ class LLMAgent:
  )

  def _process_next_response(
- self, poll_timeout: float = 1.0, max_wait_time: float = 300.0
+ self, poll_timeout: float = 1.0, max_wait_time: float = 600.0
  ):
  """
  Wait for a single event from the output queue (with timeout), process it, and return the result.
@@ -213,7 +214,6 @@ class LLMAgent:
  ]:
  return (event, False)

-
  def _get_event_from_output_queue(self, poll_timeout):
  try:
  return self.output_queue.get(timeout=poll_timeout)
@@ -306,6 +306,7 @@ class LLMAgent:
  config = self.llm_provider.driver_config
  loop_count = 1
  import threading
+
  cancel_event = threading.Event()
  while True:
  self._print_verbose_chat_loop(loop_count)
@@ -317,7 +318,9 @@ class LLMAgent:
  cancel_event.set()
  raise
  if getattr(self, "verbose_agent", False):
- print(f"[agent] [DEBUG] Returned from _process_next_response: result={result}, added_tool_results={added_tool_results}")
+ print(
+ f"[agent] [DEBUG] Returned from _process_next_response: result={result}, added_tool_results={added_tool_results}"
+ )
  if self._should_exit_chat_loop(result, added_tool_results):
  return result
  loop_count += 1
@@ -332,11 +335,15 @@ class LLMAgent:
  def _should_exit_chat_loop(self, result, added_tool_results):
  if result is None:
  if getattr(self, "verbose_agent", False):
- print("[agent] [INFO] Exiting chat loop: _process_next_response returned None result (likely timeout or error). Returning (None, False).")
+ print(
+ "[agent] [INFO] Exiting chat loop: _process_next_response returned None result (likely timeout or error). Returning (None, False)."
+ )
  return True
  if not added_tool_results:
  if getattr(self, "verbose_agent", False):
- print(f"[agent] [INFO] Exiting chat loop: _process_next_response returned added_tool_results=False (final response or no more tool calls). Returning result: {result}")
+ print(
+ f"[agent] [INFO] Exiting chat loop: _process_next_response returned added_tool_results=False (final response or no more tool calls). Returning result: {result}"
+ )
  return True
  return False

@@ -434,7 +441,9 @@ class LLMAgent:
  config.model = model_name
  config.temperature = self._safe_float(getattr(model_spec, "default_temp", None))
  config.max_tokens = self._safe_int(getattr(model_spec, "max_response", None))
- config.max_completion_tokens = self._safe_int(getattr(model_spec, "max_cot", None))
+ config.max_completion_tokens = self._safe_int(
+ getattr(model_spec, "max_cot", None)
+ )
  config.top_p = None
  config.presence_penalty = None
  config.frequency_penalty = None
janito/llm/driver.py CHANGED
@@ -55,6 +55,7 @@ class LLMDriver(ABC):
  # Validate all tool schemas before starting the thread
  if self.tools_adapter is not None:
  from janito.tools.tools_schema import ToolSchemaBase
+
  validator = ToolSchemaBase()
  for tool in self.tools_adapter.get_tools():
  # Validate the tool's class (not instance)
janito/provider_registry.py CHANGED
@@ -5,7 +5,6 @@ ProviderRegistry: Handles provider listing and selection logic for janito CLI.
  from rich.table import Table
  from janito.cli.console import shared_console
  from janito.providers.registry import LLMProviderRegistry
- from janito.providers.provider_static_info import STATIC_PROVIDER_METADATA
  from janito.llm.auth import LLMAuthManager
  import sys
  from janito.exceptions import MissingProviderSelectionException
@@ -21,7 +20,9 @@ class ProviderRegistry:
  self._print_table(table)

  def _get_provider_names(self):
- return list(STATIC_PROVIDER_METADATA.keys())
+ from janito.providers.registry import LLMProviderRegistry
+
+ return LLMProviderRegistry.list_providers()

  def _create_table(self):
  table = Table(title="Supported LLM Providers")
@@ -71,77 +72,35 @@ class ProviderRegistry:
  print(ascii_row)

  def _get_provider_info(self, provider_name):
- static_info = STATIC_PROVIDER_METADATA.get(provider_name, {})
- maintainer_val = static_info.get("maintainer", "-")
- maintainer = (
- "[red]🚨 Needs maintainer[/red]"
- if maintainer_val == "Needs maintainer"
- else f"👤 {maintainer_val}"
- )
- model_names = "-"
- unavailable_reason = None
+ provider_class = LLMProviderRegistry.get(provider_name)
+ maintainer = getattr(provider_class, "MAINTAINER", "-")
+ maintainer = f"👤 {maintainer}" if maintainer != "-" else maintainer
+ model_names = self._get_model_names(provider_name)
  skip = False
- try:
- provider_class = LLMProviderRegistry.get(provider_name)
- creds = LLMAuthManager().get_credentials(provider_name)
- provider_instance = None
- instantiation_failed = False
- try:
- provider_instance = provider_class()
- except NotImplementedError:
- skip = True
- unavailable_reason = "Not implemented"
- model_names = f"[red]❌ Not implemented[/red]"
- except Exception as e:
- instantiation_failed = True
- unavailable_reason = (
- f"Unavailable (import error or missing dependency): {str(e)}"
- )
- model_names = f"[red]❌ {unavailable_reason}[/red]"
- if not instantiation_failed and provider_instance is not None:
- available, unavailable_reason = self._get_availability(
- provider_instance
- )
- if (
- not available
- and unavailable_reason
- and "not implemented" in str(unavailable_reason).lower()
- ):
- skip = True
- if available:
- model_names = self._get_model_names(provider_name)
- else:
- model_names = f"[red]❌ {unavailable_reason}[/red]"
- except Exception as import_error:
- model_names = f"[red]❌ Unavailable (cannot import provider module): {str(import_error)}[/red]"
  return (provider_name, maintainer, model_names, skip)

- def _get_availability(self, provider_instance):
+ def _get_model_names(self, provider_name):
  try:
- available = getattr(provider_instance, "available", True)
- unavailable_reason = getattr(provider_instance, "unavailable_reason", None)
+ provider_class = LLMProviderRegistry.get(provider_name)
+ module_parts = provider_class.__module__.split(".")
+ # Build the correct import path: janito.providers.{provider}.model_info
+ model_info_module = f"janito.providers.{provider_name}.model_info"
+ model_info_mod = __import__(model_info_module, fromlist=["MODEL_SPECS"])
+
+ # Handle different model spec variable names
+ model_specs = None
+ if hasattr(model_info_mod, "MODEL_SPECS"):
+ model_specs = model_info_mod.MODEL_SPECS
+ elif hasattr(model_info_mod, "MOONSHOTAI_MODEL_SPECS"):
+ model_specs = model_info_mod.MOONSHOTAI_MODEL_SPECS
+
+ if provider_name == "groq":
+ return "<any> (must be provided)"
+ if model_specs:
+ return ", ".join(model_specs.keys())
+ return "-"
  except Exception as e:
- available = False
- unavailable_reason = f"Error reading runtime availability: {str(e)}"
- return available, unavailable_reason
-
- def _get_model_names(self, provider_name):
- provider_to_specs = {
- "openai": "janito.providers.openai.model_info",
- "azure_openai": "janito.providers.azure_openai.model_info",
- "google": "janito.providers.google.model_info",
- "anthropic": "janito.providers.anthropic.model_info",
- "deepseek": "janito.providers.deepseek.model_info",
- }
- if provider_name in provider_to_specs:
- try:
- mod = __import__(
- provider_to_specs[provider_name], fromlist=["MODEL_SPECS"]
- )
- return ", ".join(mod.MODEL_SPECS.keys())
- except Exception:
- return "(Error)"
- return "-"
+ return "-"

  def _maintainer_sort_key(self, row):
  maint = row[1]
@@ -157,8 +116,10 @@ class ProviderRegistry:
  return None
  provider_class = LLMProviderRegistry.get(provider_name)
  if provider_class is None:
- available = ', '.join(LLMProviderRegistry.list_providers())
- print(f"Error: Provider '{provider_name}' is not recognized. Available providers: {available}.")
+ available = ", ".join(LLMProviderRegistry.list_providers())
+ print(
+ f"Error: Provider '{provider_name}' is not recognized. Available providers: {available}."
+ )
  return None
  return provider_class
janito/providers/__init__.py CHANGED
@@ -5,3 +5,4 @@ import janito.providers.azure_openai.provider
  import janito.providers.anthropic.provider
  import janito.providers.deepseek.provider
  import janito.providers.moonshotai.provider
+ import janito.providers.groq.provider
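The provider_registry.py change above replaces the hard-coded provider-to-module map with a dynamic lookup of each provider's model_info module. A minimal sketch of that lookup, assuming the janito.providers.<name>.model_info layout shown in this diff (the helper name lookup_model_names is illustrative, not part of the package):

# Sketch of the dynamic model-spec lookup performed by the refactored
# _get_model_names; falls back to MOONSHOTAI_MODEL_SPECS as in the hunk above.
import importlib

def lookup_model_names(provider_name: str) -> str:
    try:
        mod = importlib.import_module(f"janito.providers.{provider_name}.model_info")
        specs = getattr(mod, "MODEL_SPECS", None) or getattr(mod, "MOONSHOTAI_MODEL_SPECS", None)
        return ", ".join(specs.keys()) if specs else "-"
    except Exception:
        return "-"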
janito/providers/anthropic/model_info.py CHANGED
@@ -38,4 +38,3 @@ MODEL_SPECS = {
  driver="OpenAIModelDriver",
  ),
  }
-
janito/providers/anthropic/provider.py CHANGED
@@ -1,12 +1,3 @@
- from janito.llm.provider import LLMProvider
- from janito.llm.model import LLMModelInfo
- from janito.llm.auth import LLMAuthManager
- from janito.llm.driver_config import LLMDriverConfig
- from janito.tools import get_local_tools_adapter
- from janito.providers.registry import LLMProviderRegistry
-
- from .model_info import MODEL_SPECS
-
  from janito.llm.provider import LLMProvider
  from janito.llm.model import LLMModelInfo
  from janito.llm.auth import LLMAuthManager
@@ -16,9 +7,11 @@ from janito.providers.registry import LLMProviderRegistry
  from .model_info import MODEL_SPECS
  from janito.drivers.openai.driver import OpenAIModelDriver

+
  class AnthropicProvider(LLMProvider):
  name = "anthropic"
- maintainer = "Needs maintainer"
+ NAME = "anthropic"
+ MAINTAINER = "Alberto Minetti <alberto.minetti@gmail.com>"
  MODEL_SPECS = MODEL_SPECS
  DEFAULT_MODEL = "claude-3-7-sonnet-20250219"

@@ -27,10 +20,10 @@ class AnthropicProvider(LLMProvider):
  ):
  self._tools_adapter = get_local_tools_adapter()
  self.auth_manager = auth_manager or LLMAuthManager()
- self._api_key = self.auth_manager.get_credentials(type(self).name)
+ self._api_key = self.auth_manager.get_credentials(type(self).NAME)
  self._tools_adapter = get_local_tools_adapter()
  self._driver_config = config or LLMDriverConfig(model=None)
- if not getattr(self._driver_config, 'model', None):
+ if not getattr(self._driver_config, "model", None):
  self._driver_config.model = self.DEFAULT_MODEL
  if not self._driver_config.api_key:
  self._driver_config.api_key = self._api_key
@@ -42,7 +35,9 @@ class AnthropicProvider(LLMProvider):
  @property
  def driver(self) -> OpenAIModelDriver:
  if not self.available:
- raise ImportError(f"AnthropicProvider unavailable: {self.unavailable_reason}")
+ raise ImportError(
+ f"AnthropicProvider unavailable: {self.unavailable_reason}"
+ )
  return self._driver

  @property
@@ -77,4 +72,4 @@ class AnthropicProvider(LLMProvider):
  return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)


- LLMProviderRegistry.register(AnthropicProvider.name, AnthropicProvider)
+ LLMProviderRegistry.register(AnthropicProvider.NAME, AnthropicProvider)
janito/providers/azure_openai/provider.py CHANGED
@@ -16,7 +16,8 @@ maintainer = "João Pinto <lamego.pinto@gmail.com>"

  class AzureOpenAIProvider(LLMProvider):
  name = "azure_openai"
- maintainer = "João Pinto <lamego.pinto@gmail.com>"
+ NAME = "azure_openai"
+ MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
  MODEL_SPECS = MODEL_SPECS
  DEFAULT_MODEL = "azure_openai_deployment"

@@ -30,7 +31,7 @@ class AzureOpenAIProvider(LLMProvider):
  self._driver = None
  return
  self._auth_manager = auth_manager or LLMAuthManager()
- self._api_key = self._auth_manager.get_credentials(type(self).name)
+ self._api_key = self._auth_manager.get_credentials(type(self).NAME)
  self._tools_adapter = get_local_tools_adapter()
  self._driver_config = config or LLMDriverConfig(model=None)
  if not self._driver_config.model:
@@ -41,6 +42,7 @@ class AzureOpenAIProvider(LLMProvider):
  self._driver_config.extra["api_version"] = "2023-05-15"
  # Inject azure_deployment_name from config if present
  from janito.config import config as global_config
+
  deployment_name = global_config.get("azure_deployment_name")
  if deployment_name:
  self._driver_config.extra["azure_deployment_name"] = deployment_name
@@ -76,7 +78,10 @@ class AzureOpenAIProvider(LLMProvider):
  """
  if model_name is None:
  # Return all known specs, but note: only static ones are listed
- return {name: model_info.to_dict() for name, model_info in self.MODEL_SPECS.items()}
+ return {
+ name: model_info.to_dict()
+ for name, model_info in self.MODEL_SPECS.items()
+ }
  if model_name in self.MODEL_SPECS:
  return self.MODEL_SPECS[model_name].to_dict()
  # Accept any deployment name as a valid model
@@ -120,4 +125,4 @@ class AzureOpenAIProvider(LLMProvider):
  return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)


- LLMProviderRegistry.register(AzureOpenAIProvider.name, AzureOpenAIProvider)
+ LLMProviderRegistry.register(AzureOpenAIProvider.NAME, AzureOpenAIProvider)
janito/providers/deepseek/provider.py CHANGED
@@ -12,9 +12,10 @@ available = OpenAIModelDriver.available
  unavailable_reason = OpenAIModelDriver.unavailable_reason


- class DeepseekProvider(LLMProvider):
+ class DeepSeekProvider(LLMProvider):
  name = "deepseek"
- maintainer = "Needs maintainer"
+ NAME = "deepseek"
+ MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
  MODEL_SPECS = MODEL_SPECS
  DEFAULT_MODEL = "deepseek-chat" # Options: deepseek-chat, deepseek-reasoner

@@ -28,7 +29,7 @@ class DeepseekProvider(LLMProvider):
  self._driver = None
  else:
  self.auth_manager = auth_manager or LLMAuthManager()
- self._api_key = self.auth_manager.get_credentials(type(self).name)
+ self._api_key = self.auth_manager.get_credentials(type(self).NAME)
  self._tools_adapter = get_local_tools_adapter()
  self._driver_config = config or LLMDriverConfig(model=None)
  if not self._driver_config.model:
@@ -91,4 +92,4 @@ class DeepseekProvider(LLMProvider):
  return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)


- LLMProviderRegistry.register(DeepseekProvider.name, DeepseekProvider)
+ LLMProviderRegistry.register(DeepSeekProvider.NAME, DeepSeekProvider)
janito/providers/google/model_info.py CHANGED
@@ -10,7 +10,7 @@ MODEL_SPECS = {
  max_cot=24576,
  thinking_supported=True,
  ),
- "gemini-2.5-pro": LLMModelInfo(
+ "gemini-2.5-pro": LLMModelInfo(
  name="gemini-2.5-pro",
  other={"description": "Google Gemini 2.5 Pro (OpenAI-compatible endpoint)"},
  open="google",
@@ -21,7 +21,9 @@ MODEL_SPECS = {
  ),
  "gemini-2.5-flash-lite-preview-06-17": LLMModelInfo(
  name="gemini-2.5-flash-lite-preview-06-17",
- other={"description": "Google Gemini 2.5 Flash-Lite Preview (OpenAI-compatible endpoint)"},
+ other={
+ "description": "Google Gemini 2.5 Flash-Lite Preview (OpenAI-compatible endpoint)"
+ },
  open="google",
  driver="OpenAIModelDriver",
  max_response=64000,
janito/providers/google/provider.py CHANGED
@@ -13,9 +13,11 @@ try:
  except ImportError:
  MODEL_SPECS = {}

+
  class GoogleProvider(LLMProvider):
  name = "google"
- maintainer = "João Pinto <lamego.pinto@gmail.com>"
+ NAME = "google"
+ MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
  MODEL_SPECS = MODEL_SPECS
  DEFAULT_MODEL = "gemini-2.5-flash" # Default Gemini model

@@ -33,19 +35,23 @@ class GoogleProvider(LLMProvider):
  self._tools_adapter = get_local_tools_adapter()
  self._driver_config = config or LLMDriverConfig(model=None)
  # Only set default if model is not set by CLI/config
- if not getattr(self._driver_config, 'model', None):
+ if not getattr(self._driver_config, "model", None):
  self._driver_config.model = self.DEFAULT_MODEL
  if not self._driver_config.api_key:
  self._driver_config.api_key = self._api_key
  # Set the Gemini API endpoint for OpenAI compatibility
- self._driver_config.base_url = "https://generativelanguage.googleapis.com/v1beta/openai/"
+ self._driver_config.base_url = (
+ "https://generativelanguage.googleapis.com/v1beta/openai/"
+ )
  self.fill_missing_device_info(self._driver_config)
  self._driver = None # to be provided by factory/agent

  @property
  def driver(self) -> OpenAIModelDriver:
  if not self.available:
- raise ImportError(f"GoogleOpenAIProvider unavailable: {self.unavailable_reason}")
+ raise ImportError(
+ f"GoogleOpenAIProvider unavailable: {self.unavailable_reason}"
+ )
  return self._driver

  @property
@@ -80,4 +86,4 @@ class GoogleProvider(LLMProvider):
  return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)


- LLMProviderRegistry.register(GoogleProvider.name, GoogleProvider)
+ LLMProviderRegistry.register(GoogleProvider.NAME, GoogleProvider)
janito/providers/groq/__init__.py ADDED
@@ -0,0 +1 @@
+ from .provider import GroqProvider
janito/providers/groq/model_info.py ADDED
@@ -0,0 +1,46 @@
+ # Groq provider model specifications
+ from janito.llm.model import LLMModelInfo
+
+ MODEL_SPECS = {
+ "moonshotai/kimi-k2-instruct": LLMModelInfo(
+ name="moonshotai/kimi-k2-instruct",
+ context=128000,
+ max_input=122880,
+ max_cot="N/A",
+ max_response=4096,
+ thinking_supported=False,
+ default_temp=0.2,
+ open="groq",
+ driver="GroqModelDriver",
+ other={
+ "description": "Kimi K2 Instruct model by Moonshot AI",
+ "supports_tools": True,
+ "supports_streaming": True,
+ "supports_vision": False,
+ "supports_system_prompt": True,
+ "supports_temperature": True,
+ "supports_top_p": True,
+ "supports_frequency_penalty": True,
+ "supports_presence_penalty": True,
+ "supports_stop_sequences": True,
+ "supports_max_tokens": True,
+ "supports_seed": False,
+ "supports_json_mode": True,
+ "supports_logprobs": False,
+ "supports_top_logprobs": False,
+ "supports_response_format": True,
+ "supports_n": False,
+ "supports_best_of": False,
+ "supports_echo": False,
+ "supports_logit_bias": False,
+ "supports_user": False,
+ "supports_assistant": False,
+ "supports_system": False,
+ "supports_functions": True,
+ "supports_tool_calls": True,
+ "supports_parallel_tool_calls": True,
+ "supports_stream_options": False,
+ "supports_include_usage": False,
+ },
+ )
+ }
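For context, the janito/llm/agent.py hunk earlier in this diff reads these spec fields into the driver configuration. A minimal sketch of that mapping, where the helper name apply_model_spec is illustrative and not part of the package:

# Illustrative mapping of LLMModelInfo fields onto driver config values,
# following the agent.py hunk above (which also coerces values via
# _safe_float / _safe_int before assignment).
def apply_model_spec(config, model_spec):
    config.temperature = getattr(model_spec, "default_temp", None)       # 0.2 for kimi-k2-instruct
    config.max_tokens = getattr(model_spec, "max_response", None)        # 4096
    config.max_completion_tokens = getattr(model_spec, "max_cot", None)  # "N/A" here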
janito/providers/groq/provider.py ADDED
@@ -0,0 +1,76 @@
+ from janito.llm.provider import LLMProvider
+ from janito.llm.model import LLMModelInfo
+ from janito.llm.auth import LLMAuthManager
+ from janito.llm.driver_config import LLMDriverConfig
+ from janito.drivers.openai.driver import OpenAIModelDriver
+ from janito.tools import get_local_tools_adapter
+ from janito.providers.registry import LLMProviderRegistry
+ from .model_info import MODEL_SPECS
+ from queue import Queue
+
+ available = OpenAIModelDriver.available
+ unavailable_reason = OpenAIModelDriver.unavailable_reason
+
+
+ class GroqProvider(LLMProvider):
+ name = "groq"
+ NAME = "groq"
+ MAINTAINER = "Groq Inc. <support@groq.com>"
+ MODEL_SPECS = MODEL_SPECS
+ DEFAULT_MODEL = "moonshotai/kimi-k2-instruct"
+
+ def __init__(
+ self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
+ ):
+ if not self.available:
+ self._tools_adapter = get_local_tools_adapter()
+ self._driver = None
+ else:
+ self.auth_manager = auth_manager or LLMAuthManager()
+ self._api_key = self.auth_manager.get_credentials(type(self).NAME)
+ self._tools_adapter = get_local_tools_adapter()
+ self._driver_config = config or LLMDriverConfig(model=self.DEFAULT_MODEL)
+ if not self._driver_config.model:
+ self._driver_config.model = self.DEFAULT_MODEL
+
+ if not self._driver_config.api_key:
+ self._driver_config.api_key = self._api_key
+ self._driver_config.base_url = "https://api.groq.com/openai/v1"
+ self.fill_missing_device_info(self._driver_config)
+ self._driver = None
+
+ @property
+ def driver(self) -> OpenAIModelDriver:
+ if not self.available:
+ raise ImportError(f"GroqProvider unavailable: {self.unavailable_reason}")
+ return self._driver
+
+ @property
+ def available(self):
+ return available
+
+ @property
+ def unavailable_reason(self):
+ return unavailable_reason
+
+ def create_driver(self):
+ driver = OpenAIModelDriver(
+ tools_adapter=self._tools_adapter, provider_name=self.NAME
+ )
+ driver.config = self._driver_config
+ return driver
+
+ @property
+ def model_name(self):
+ return self._driver_config.model
+
+ @property
+ def driver_config(self):
+ return self._driver_config
+
+ def execute_tool(self, tool_name: str, event_bus, *args, **kwargs):
+ self._tools_adapter.event_bus = event_bus
+ return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)
+
+
+ LLMProviderRegistry.register(GroqProvider.NAME, GroqProvider)
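The new provider registers itself under the name "groq" when janito.providers is imported (see the providers/__init__.py hunk above). A minimal usage sketch, assuming the OpenAI driver dependency is installed and a Groq API key has already been stored via LLMAuthManager:

# Resolve the newly registered provider through the registry, mirroring how
# provider_registry.py looks providers up elsewhere in this diff.
import janito.providers  # importing the package registers all providers, including "groq"
from janito.providers.registry import LLMProviderRegistry

provider_cls = LLMProviderRegistry.get("groq")
provider = provider_cls()  # defaults to the "moonshotai/kimi-k2-instruct" model
print(provider.model_name, provider.driver_config.base_url)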
janito/providers/moonshotai/provider.py CHANGED
@@ -6,13 +6,17 @@ from janito.tools import get_local_tools_adapter
  from janito.providers.registry import LLMProviderRegistry
  from .model_info import MOONSHOTAI_MODEL_SPECS

+
  class MoonshotAIProvider(LLMProvider):
  name = "moonshotai"
- maintainer = "João Pinto <lamego.pinto@gmail.com>"
+ NAME = "moonshotai"
+ MAINTAINER = "João Pinto <lamego.pinto@gmail.com>"
  MODEL_SPECS = MOONSHOTAI_MODEL_SPECS
  DEFAULT_MODEL = "kimi-k2-0711-preview"

- def __init__(self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None):
+ def __init__(
+ self, auth_manager: LLMAuthManager = None, config: LLMDriverConfig = None
+ ):
  if not self.available:
  self._tools_adapter = get_local_tools_adapter()
  self._driver = None
@@ -49,7 +53,9 @@ class MoonshotAIProvider(LLMProvider):
  @property
  def driver(self) -> OpenAIModelDriver:
  if not self.available:
- raise ImportError(f"MoonshotAIProvider unavailable: {self.unavailable_reason}")
+ raise ImportError(
+ f"MoonshotAIProvider unavailable: {self.unavailable_reason}"
+ )
  return self._driver

  @property
@@ -79,4 +85,5 @@ class MoonshotAIProvider(LLMProvider):
  self._tools_adapter.event_bus = event_bus
  return self._tools_adapter.execute_by_name(tool_name, *args, **kwargs)

- LLMProviderRegistry.register(MoonshotAIProvider.name, MoonshotAIProvider)
+
+ LLMProviderRegistry.register(MoonshotAIProvider.NAME, MoonshotAIProvider)
janito/providers/openai/model_info.py CHANGED
@@ -123,5 +123,4 @@ MODEL_SPECS = {
  open="openai",
  driver="OpenAIModelDriver",
  ),
-
  }