janito 2.20.1__py3-none-any.whl → 2.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35) hide show
  1. janito/README.md +47 -4
  2. janito/agent/setup_agent.py +34 -4
  3. janito/agent/templates/profiles/system_prompt_template_Developer_with_Python_Tools.txt.j2 +0 -0
  4. janito/agent/templates/profiles/system_prompt_template_developer.txt.j2 +0 -0
  5. janito/agent/templates/profiles/system_prompt_template_market_analyst.txt.j2 +10 -0
  6. janito/agent/templates/profiles/system_prompt_template_model_conversation_without_tools_or_context.txt.j2 +0 -0
  7. janito/cli/chat_mode/session_profile_select.py +20 -3
  8. janito/cli/chat_mode/shell/commands.bak.zip +0 -0
  9. janito/cli/chat_mode/shell/session.bak.zip +0 -0
  10. janito/cli/cli_commands/list_profiles.py +29 -1
  11. janito/cli/cli_commands/show_system_prompt.py +45 -4
  12. janito/docs/GETTING_STARTED.md +85 -12
  13. janito/drivers/dashscope.bak.zip +0 -0
  14. janito/drivers/openai/README.md +0 -0
  15. janito/drivers/openai_responses.bak.zip +0 -0
  16. janito/llm/README.md +0 -0
  17. janito/mkdocs.yml +0 -0
  18. janito/providers/__init__.py +1 -0
  19. janito/providers/azure_openai/provider.py +1 -1
  20. janito/providers/dashscope.bak.zip +0 -0
  21. janito/providers/ibm/README.md +99 -0
  22. janito/providers/ibm/__init__.py +1 -0
  23. janito/providers/ibm/model_info.py +87 -0
  24. janito/providers/ibm/provider.py +149 -0
  25. janito/shell.bak.zip +0 -0
  26. janito/tools/DOCSTRING_STANDARD.txt +0 -0
  27. janito/tools/README.md +0 -0
  28. janito/tools/adapters/local/fetch_url.py +175 -25
  29. janito/tools/outline_file.bak.zip +0 -0
  30. {janito-2.20.1.dist-info → janito-2.22.0.dist-info}/METADATA +411 -411
  31. {janito-2.20.1.dist-info → janito-2.22.0.dist-info}/RECORD +20 -15
  32. {janito-2.20.1.dist-info → janito-2.22.0.dist-info}/entry_points.txt +0 -0
  33. {janito-2.20.1.dist-info → janito-2.22.0.dist-info}/licenses/LICENSE +0 -0
  34. {janito-2.20.1.dist-info → janito-2.22.0.dist-info}/top_level.txt +0 -0
  35. {janito-2.20.1.dist-info → janito-2.22.0.dist-info}/WHEEL +0 -0
janito/README.md CHANGED
@@ -43,7 +43,12 @@ janito -p google -m gemini-2.0-flash-exp "Generate unit tests"
43
43
 
44
44
  ### Interactive Chat Mode
45
45
 
46
- Start an interactive session:
46
+ Start an interactive session (default mode):
47
+ ```bash
48
+ janito
49
+ ```
50
+
51
+ Or explicitly:
47
52
  ```bash
48
53
  janito --chat
49
54
  ```
@@ -80,17 +85,23 @@ janito --set model=kimi-k1-8k
80
85
 
81
86
  ### OpenAI
82
87
 
83
- - **Models**: gpt-4, gpt-4-turbo, gpt-3.5-turbo
88
+ - **Models**: gpt-5, gpt-4.1, gpt-4o, gpt-4-turbo, gpt-3.5-turbo
84
89
  - **Setup**: Get API key from [OpenAI Platform](https://platform.openai.com/)
85
90
 
86
91
  ### Anthropic
87
92
 
88
- - **Models**: claude-3-5-sonnet-20241022, claude-3-opus-20240229
93
+ - **Models**: claude-3-7-sonnet-20250219, claude-3-5-sonnet-20241022, claude-3-opus-20240229
89
94
  - **Setup**: Get API key from [Anthropic Console](https://console.anthropic.com/)
90
95
 
96
+ ### IBM WatsonX
97
+
98
+ - **Models**: ibm/granite-3-8b-instruct, ibm/granite-3-2b-instruct, meta-llama/llama-3-1-8b-instruct, meta-llama/llama-3-1-70b-instruct, mistralai/mistral-large
99
+ - **Strengths**: Enterprise-grade AI, IBM Granite models, hosted Llama and Mistral models
100
+ - **Setup**: Get API key and project ID from [IBM Cloud](https://cloud.ibm.com/)
101
+
91
102
  ### Google
92
103
 
93
- - **Models**: gemini-2.0-flash-exp, gemini-1.5-pro
104
+ - **Models**: gemini-2.5-flash, gemini-2.5-pro, gemini-2.5-flash-lite-preview-06-17
94
105
  - **Setup**: Get API key from [Google AI Studio](https://makersuite.google.com/)
95
106
 
96
107
  ## Advanced Features
@@ -114,12 +125,44 @@ janito --role python-expert "Optimize this algorithm"
114
125
 
115
126
  ### Environment Variables
116
127
  You can also configure via environment variables:
128
+
129
+ **MoonshotAI:**
117
130
  ```bash
118
131
  export MOONSHOTAI_API_KEY=your_key_here
119
132
  export JANITO_PROVIDER=moonshotai
120
133
  export JANITO_MODEL=kimi-k1-8k
121
134
  ```
122
135
 
136
+ **OpenAI:**
137
+ ```bash
138
+ export OPENAI_API_KEY=your_key_here
139
+ export JANITO_PROVIDER=openai
140
+ export JANITO_MODEL=gpt-5
141
+ ```
142
+
143
+ **IBM WatsonX:**
144
+ ```bash
145
+ export WATSONX_API_KEY=your_key_here
146
+ export WATSONX_PROJECT_ID=your_project_id
147
+ export WATSONX_SPACE_ID=your_space_id # optional
148
+ export JANITO_PROVIDER=ibm
149
+ export JANITO_MODEL=ibm/granite-3-8b-instruct
150
+ ```
151
+
152
+ **Anthropic:**
153
+ ```bash
154
+ export ANTHROPIC_API_KEY=your_key_here
155
+ export JANITO_PROVIDER=anthropic
156
+ export JANITO_MODEL=claude-3-7-sonnet-20250219
157
+ ```
158
+
159
+ **Google:**
160
+ ```bash
161
+ export GOOGLE_API_KEY=your_key_here
162
+ export JANITO_PROVIDER=google
163
+ export JANITO_MODEL=gemini-2.5-flash
164
+ ```
165
+
123
166
  ## Examples
124
167
 
125
168
  ### Code Generation
@@ -52,10 +52,40 @@ def _load_template_content(profile, templates_dir):
52
52
  with open(user_template_path, "r", encoding="utf-8") as file:
53
53
  return file.read(), user_template_path
54
54
 
55
- # If nothing matched, raise an informative error
56
- raise FileNotFoundError(
57
- f"[janito] Could not find profile-specific template '{template_filename}' in {template_path} nor in janito.agent.templates.profiles package nor in user profiles directory {user_template_path}."
58
- )
55
+ # If nothing matched, list available profiles and raise an informative error
56
+ from janito.cli.cli_commands.list_profiles import _gather_default_profiles, _gather_user_profiles
57
+
58
+ default_profiles = _gather_default_profiles()
59
+ user_profiles = _gather_user_profiles()
60
+
61
+ available_profiles = []
62
+ if default_profiles:
63
+ available_profiles.extend([(p, "default") for p in default_profiles])
64
+ if user_profiles:
65
+ available_profiles.extend([(p, "user") for p in user_profiles])
66
+
67
+ # Normalize the input profile for better matching suggestions
68
+ normalized_input = re.sub(r"\s+", " ", profile.strip().lower())
69
+
70
+ if available_profiles:
71
+ profile_list = "\n".join([f" - {name} ({source})" for name, source in available_profiles])
72
+
73
+ # Find close matches
74
+ close_matches = []
75
+ for name, source in available_profiles:
76
+ normalized_name = name.lower()
77
+ if normalized_input in normalized_name or normalized_name in normalized_input:
78
+ close_matches.append(name)
79
+
80
+ suggestion = ""
81
+ if close_matches:
82
+ suggestion = f"\nDid you mean: {', '.join(close_matches)}?"
83
+
84
+ error_msg = f"[janito] Could not find profile '{profile}'. Available profiles:\n{profile_list}{suggestion}"
85
+ else:
86
+ error_msg = f"[janito] Could not find profile '{profile}'. No profiles available."
87
+
88
+ raise FileNotFoundError(error_msg)
59
89
  # Replace spaces in profile name with underscores for filename resolution
60
90
  sanitized_profile = re.sub(r"\\s+", "_", profile.strip()) if profile else profile
61
91
  """
@@ -0,0 +1,10 @@
1
+ You are: market analyst specializing in financial markets, business intelligence, and economic research
2
+
3
+ Focus on providing market analysis, financial insights, and business intelligence. When analyzing data:
4
+
5
+ • Identify market trends and patterns
6
+ • Provide quantitative analysis with clear metrics
7
+ • Consider economic indicators and market sentiment
8
+ • Offer actionable insights for business decisions
9
+ • Use appropriate financial terminology and frameworks
10
+ • Reference relevant market data, benchmarks, and industry standards
@@ -111,6 +111,7 @@ def select_profile():
111
111
  choices = [
112
112
  "Developer with Python Tools",
113
113
  "Developer",
114
+ "Market Analyst",
114
115
  "Custom system prompt...",
115
116
  "Raw Model Session (no tools, no context)",
116
117
  ]
@@ -146,9 +147,9 @@ def select_profile():
146
147
  from jinja2 import Template
147
148
  from janito.agent.setup_agent import _prepare_template_context
148
149
 
149
- template_path = Path(
150
- "./janito/agent/templates/profiles/system_prompt_template_Developer.txt.j2"
151
- )
150
+ # Get the absolute path relative to the current script location
151
+ current_dir = Path(__file__).parent
152
+ template_path = current_dir / "../../agent/templates/profiles/system_prompt_template_developer.txt.j2"
152
153
  with open(template_path, "r", encoding="utf-8") as f:
153
154
  template_content = f.read()
154
155
 
@@ -156,4 +157,20 @@ def select_profile():
156
157
  context = _prepare_template_context("developer", "Developer", None)
157
158
  prompt = template.render(**context)
158
159
  return {"profile": "Developer", "profile_system_prompt": prompt}
160
+ elif answer == "Market Analyst":
161
+ # Return the content of the built-in Market Analyst profile prompt
162
+ from pathlib import Path
163
+ from jinja2 import Template
164
+ from janito.agent.setup_agent import _prepare_template_context
165
+
166
+ # Get the absolute path relative to the current script location
167
+ current_dir = Path(__file__).parent
168
+ template_path = current_dir / "../../agent/templates/profiles/system_prompt_template_market_analyst.txt.j2"
169
+ with open(template_path, "r", encoding="utf-8") as f:
170
+ template_content = f.read()
171
+
172
+ template = Template(template_content)
173
+ context = _prepare_template_context("market_analyst", "Market Analyst", None)
174
+ prompt = template.render(**context)
175
+ return {"profile": "Market Analyst", "profile_system_prompt": prompt}
159
176
  return answer
File without changes
File without changes
@@ -19,7 +19,35 @@ def _extract_profile_name(filename: str) -> str:
19
19
  filename = filename[len(_PREFIX) :]
20
20
  if filename.endswith(_SUFFIX):
21
21
  filename = filename[: -len(_SUFFIX)]
22
- return filename.replace("_", " ")
22
+
23
+ # Convert to title case for consistent capitalization, but handle common acronyms
24
+ name = filename.replace("_", " ")
25
+
26
+ # Convert to proper title case with consistent capitalization
27
+ name = filename.replace("_", " ")
28
+
29
+ # Handle special cases and acronyms
30
+ special_cases = {
31
+ "python": "Python",
32
+ "tools": "Tools",
33
+ "model": "Model",
34
+ "context": "Context",
35
+ "developer": "Developer",
36
+ "analyst": "Analyst",
37
+ "conversation": "Conversation",
38
+ "without": "Without"
39
+ }
40
+
41
+ words = name.split()
42
+ capitalized_words = []
43
+ for word in words:
44
+ lower_word = word.lower()
45
+ if lower_word in special_cases:
46
+ capitalized_words.append(special_cases[lower_word])
47
+ else:
48
+ capitalized_words.append(word.capitalize())
49
+
50
+ return " ".join(capitalized_words)
23
51
 
24
52
 
25
53
  def _gather_default_profiles():
@@ -60,7 +60,17 @@ def _load_template(profile, templates_dir):
60
60
  ).open("r", encoding="utf-8") as file:
61
61
  template_content = file.read()
62
62
  except (FileNotFoundError, ModuleNotFoundError, AttributeError):
63
- return template_filename, None
63
+ # Also check user profiles directory
64
+ from pathlib import Path
65
+ import os
66
+ user_profiles_dir = Path(os.path.expanduser("~/.janito/profiles"))
67
+ user_template_path = user_profiles_dir / template_filename
68
+ if user_template_path.exists():
69
+ with open(user_template_path, "r", encoding="utf-8") as file:
70
+ template_content = file.read()
71
+ else:
72
+ template_content = None
73
+ return template_filename, template_content
64
74
  return template_filename, template_content
65
75
 
66
76
 
@@ -117,9 +127,40 @@ def handle_show_system_prompt(args):
117
127
 
118
128
  if not template_content:
119
129
  if profile:
120
- raise FileNotFoundError(
121
- f"[janito] Could not find profile-specific template '{template_filename}' in {templates_dir / template_filename} nor in janito.agent.templates.profiles package."
122
- )
130
+ from janito.cli.cli_commands.list_profiles import _gather_default_profiles, _gather_user_profiles
131
+ import re
132
+
133
+ default_profiles = _gather_default_profiles()
134
+ user_profiles = _gather_user_profiles()
135
+
136
+ available_profiles = []
137
+ if default_profiles:
138
+ available_profiles.extend([(p, "default") for p in default_profiles])
139
+ if user_profiles:
140
+ available_profiles.extend([(p, "user") for p in user_profiles])
141
+
142
+ # Normalize the input profile for better matching suggestions
143
+ normalized_input = re.sub(r"\s+", " ", profile.strip().lower())
144
+
145
+ if available_profiles:
146
+ profile_list = "\n".join([f" - {name} ({source})" for name, source in available_profiles])
147
+
148
+ # Find close matches
149
+ close_matches = []
150
+ for name, source in available_profiles:
151
+ normalized_name = name.lower()
152
+ if normalized_input in normalized_name or normalized_name in normalized_input:
153
+ close_matches.append(name)
154
+
155
+ suggestion = ""
156
+ if close_matches:
157
+ suggestion = f"\nDid you mean: {', '.join(close_matches)}?"
158
+
159
+ error_msg = f"[janito] Could not find profile '{profile}'. Available profiles:\n{profile_list}{suggestion}"
160
+ else:
161
+ error_msg = f"[janito] Could not find profile '{profile}'. No profiles available."
162
+
163
+ raise FileNotFoundError(error_msg)
123
164
  else:
124
165
  print(
125
166
  f"[janito] Could not find {template_filename} in {templates_dir / template_filename} nor in janito.agent.templates.profiles package."
@@ -1,6 +1,6 @@
1
1
  # Getting Started with Janito
2
2
 
3
- This guide will help you set up Janito CLI quickly and start using it with MoonshotAI as your default provider.
3
+ This guide will help you set up Janito CLI quickly and start using it with your preferred AI provider.
4
4
 
5
5
  ## Quick Setup (2 minutes)
6
6
 
@@ -9,14 +9,29 @@ This guide will help you set up Janito CLI quickly and start using it with Moons
9
9
  pip install janito
10
10
  ```
11
11
 
12
- ### 2. Get Your MoonshotAI API Key
12
+ ### 2. Choose Your Provider
13
13
 
14
+ Janito supports multiple AI providers. Choose one to get started:
15
+
16
+ **MoonshotAI (Recommended for Chinese users)**
14
17
  1. Go to [Moonshot AI Platform](https://platform.moonshot.cn/)
15
18
  2. Sign up for an account
16
19
  3. Navigate to API Keys section
17
20
  4. Create a new API key
18
21
 
22
+ **OpenAI**
23
+ 1. Go to [OpenAI Platform](https://platform.openai.com/)
24
+ 2. Sign up and add payment method
25
+ 3. Create an API key
26
+
27
+ **IBM WatsonX**
28
+ 1. Go to [IBM Cloud](https://cloud.ibm.com/)
29
+ 2. Create a WatsonX AI service
30
+ 3. Get your API key and project ID
31
+
19
32
  ### 3. Configure Janito
33
+
34
+ **MoonshotAI Setup:**
20
35
  ```bash
21
36
  # Set MoonshotAI as your default provider
22
37
  janito --set-api-key YOUR_API_KEY -p moonshotai
@@ -25,6 +40,25 @@ janito --set-api-key YOUR_API_KEY -p moonshotai
25
40
  janito "Hello, can you introduce yourself?"
26
41
  ```
27
42
 
43
+ **OpenAI Setup:**
44
+ ```bash
45
+ # Set OpenAI as your default provider
46
+ janito --set-api-key YOUR_OPENAI_API_KEY -p openai
47
+
48
+ # Verify it's working
49
+ janito "Hello, can you introduce yourself?"
50
+ ```
51
+
52
+ **IBM WatsonX Setup:**
53
+ ```bash
54
+ # Set IBM WatsonX as your default provider
55
+ janito --set-api-key YOUR_WATSONX_API_KEY -p ibm
56
+ janito --set-config ibm project_id YOUR_PROJECT_ID
57
+
58
+ # Verify it's working
59
+ janito "Hello, can you introduce yourself?"
60
+ ```
61
+
28
62
  ## Your First Commands
29
63
 
30
64
  ### Basic Usage
@@ -52,29 +86,59 @@ janito -W ./my_project "Create a REST API with FastAPI"
52
86
 
53
87
  ### Set as Default Provider
54
88
  ```bash
55
- # Make MoonshotAI your permanent default
56
- janito --set provider=moonshotai
57
- janito --set model=kimi-k1-8k
89
+ # Make your chosen provider the permanent default
90
+ janito --set provider=moonshotai # or openai, ibm, etc.
91
+ janito --set model=kimi-k1-8k # or gpt-5, ibm/granite-3-8b-instruct, etc.
58
92
  ```
59
93
 
60
94
  ### Environment Variables
61
95
  You can also use environment variables:
96
+
97
+ **MoonshotAI:**
62
98
  ```bash
63
99
  export MOONSHOTAI_API_KEY=your_key_here
64
100
  export JANITO_PROVIDER=moonshotai
65
101
  export JANITO_MODEL=kimi-k1-8k
66
102
  ```
67
103
 
68
- ## MoonshotAI Models
104
+ **OpenAI:**
105
+ ```bash
106
+ export OPENAI_API_KEY=your_key_here
107
+ export JANITO_PROVIDER=openai
108
+ export JANITO_MODEL=gpt-5
109
+ ```
110
+
111
+ **IBM WatsonX:**
112
+ ```bash
113
+ export WATSONX_API_KEY=your_key_here
114
+ export WATSONX_PROJECT_ID=your_project_id
115
+ export WATSONX_SPACE_ID=your_space_id # optional
116
+ export JANITO_PROVIDER=ibm
117
+ export JANITO_MODEL=ibm/granite-3-3-8b-instruct
118
+ ```
69
119
 
70
- Janito supports these MoonshotAI models:
120
+ ## Available Models by Provider
71
121
 
122
+ ### MoonshotAI Models
72
123
  - **kimi-k1-8k**: Fast responses, good for general tasks
73
124
  - **kimi-k1-32k**: Better for longer contexts
74
125
  - **kimi-k1-128k**: Best for very long documents
75
126
  - **kimi-k2-turbo-preview**: Latest model with enhanced capabilities
76
127
  - **kimi-k2-turbo-preview**: Turbo version of the advanced reasoning model
77
128
 
129
+ ### OpenAI Models
130
+ - **gpt-5**: Latest GPT model with advanced capabilities
131
+ - **gpt-4.1**: High-performance model for complex tasks
132
+ - **gpt-4o**: Optimized for speed and cost
133
+ - **o3-mini**: Reasoning-focused model
134
+
135
+ ### IBM WatsonX Models
136
+ - **ibm/granite-3-3-8b-instruct**: IBM's latest Granite 3.3 8B Instruct model (default)
137
+ - **ibm/granite-3-8b-instruct**: IBM's Granite 3 8B Instruct model
138
+ - **meta-llama/llama-3-3-70b-instruct**: Meta Llama 3.3 70B hosted on WatsonX
139
+ - **meta-llama/llama-3-1-70b-instruct**: Meta Llama 3.1 70B hosted on WatsonX
140
+ - **mistralai/mistral-large-2407**: Latest Mistral Large model hosted on WatsonX
141
+
78
142
  ## Next Steps
79
143
 
80
144
  1. **Explore tools**: Run `janito --list-tools` to see available tools
@@ -91,14 +155,14 @@ Janito supports these MoonshotAI models:
91
155
  # Check available providers
92
156
  janito --list-providers
93
157
 
94
- # Re-register MoonshotAI
95
- janito --set-api-key YOUR_KEY -p moonshotai
158
+ # Re-register your provider
159
+ janito --set-api-key YOUR_KEY -p YOUR_PROVIDER
96
160
  ```
97
161
 
98
162
  **"Model not available" error**
99
163
  ```bash
100
- # List available MoonshotAI models
101
- janito -p moonshotai --list-models
164
+ # List available models for your provider
165
+ janito -p YOUR_PROVIDER --list-models
102
166
  ```
103
167
 
104
168
  **API key issues**
@@ -107,7 +171,16 @@ janito -p moonshotai --list-models
107
171
  janito --show-config
108
172
 
109
173
  # Reset API key
110
- janito --set-api-key NEW_KEY -p moonshotai
174
+ janito --set-api-key NEW_KEY -p YOUR_PROVIDER
175
+ ```
176
+
177
+ **IBM WatsonX specific issues**
178
+ ```bash
179
+ # Check if project ID is set
180
+ janito --show-config
181
+
182
+ # Set project ID if missing
183
+ janito --set-config ibm project_id YOUR_PROJECT_ID
111
184
  ```
112
185
 
113
186
  ### Getting Help
File without changes
File without changes
File without changes
janito/llm/README.md CHANGED
File without changes
janito/mkdocs.yml CHANGED
File without changes
@@ -9,3 +9,4 @@ import janito.providers.alibaba.provider
9
9
  import janito.providers.zai.provider
10
10
  import janito.providers.cerebras.provider
11
11
  import janito.providers.mistral.provider
12
+ import janito.providers.ibm.provider
@@ -82,7 +82,7 @@ class AzureOpenAIProvider(LLMProvider):
82
82
  If the model_name is not in MODEL_SPECS, return a generic info dict.
83
83
  """
84
84
  if model_name is None:
85
- # Return all known specs, but note: only static ones are listed
85
+ # Return all known specs
86
86
  return {
87
87
  name: model_info.to_dict()
88
88
  for name, model_info in self.MODEL_SPECS.items()
File without changes
@@ -0,0 +1,99 @@
1
+ # IBM WatsonX AI Provider
2
+
3
+ This provider enables access to IBM WatsonX AI services, including IBM's Granite models and other hosted models.
4
+
5
+ ## Setup
6
+
7
+ ### Prerequisites
8
+
9
+ 1. **IBM Cloud Account**: You need an IBM Cloud account with WatsonX AI service enabled.
10
+ 2. **API Key**: Generate an API key from your IBM Cloud dashboard.
11
+ 3. **Project ID**: Create a WatsonX project and get the project ID.
12
+
13
+ ### Authentication
14
+
15
+ Set up your credentials using the CLI:
16
+
17
+ ```bash
18
+ # Set the API key
19
+ janito --set-api-key YOUR_IBM_API_KEY -p ibm
20
+
21
+ # Set the project ID
22
+ janito --set-config ibm project_id YOUR_PROJECT_ID
23
+
24
+ # Optional: Set space ID if using WatsonX spaces
25
+ janito --set-config ibm space_id YOUR_SPACE_ID
26
+ ```
27
+
28
+ ### Environment Variables
29
+
30
+ Alternatively, you can set environment variables:
31
+
32
+ ```bash
33
+ export WATSONX_API_KEY="your-api-key"
34
+ export WATSONX_PROJECT_ID="your-project-id"
35
+ export WATSONX_SPACE_ID="your-space-id" # optional
36
+ ```
37
+
38
+ ## Available Models
39
+
40
+ The IBM provider supports the following models:
41
+
42
+ - **ibm/granite-3-8b-instruct**: IBM's Granite 3 8B Instruct model (default)
43
+ - **ibm/granite-3-2b-instruct**: IBM's Granite 3 2B Instruct model
44
+ - **meta-llama/llama-3-1-8b-instruct**: Meta Llama 3.1 8B hosted on WatsonX
45
+ - **meta-llama/llama-3-1-70b-instruct**: Meta Llama 3.1 70B hosted on WatsonX
46
+ - **mistralai/mistral-large**: Mistral Large model hosted on WatsonX
47
+
48
+ ## Usage
49
+
50
+ ### Command Line
51
+
52
+ ```bash
53
+ # Use IBM provider with default model
54
+ janito -p ibm "Explain quantum computing"
55
+
56
+ # Use specific IBM model
57
+ janito -p ibm -m ibm/granite-3-2b-instruct "Generate a Python function"
58
+
59
+ # Interactive chat mode
60
+ janito -p ibm --chat
61
+ ```
62
+
63
+ ### Configuration
64
+
65
+ You can set IBM as your default provider:
66
+
67
+ ```bash
68
+ janito --set-config provider ibm
69
+ ```
70
+
71
+ ## API Reference
72
+
73
+ The IBM provider uses IBM WatsonX's REST API with OpenAI-compatible format. The base URL is:
74
+
75
+ ```
76
+ https://us-south.ml.cloud.ibm.com
77
+ ```
78
+
79
+ ## Limitations
80
+
81
+ - **Rate Limits**: IBM WatsonX has rate limits based on your subscription tier
82
+ - **Context Window**: Models have different context window limits (typically 128K tokens)
83
+ - **Region Support**: Currently configured for US-South region
84
+
85
+ ## Troubleshooting
86
+
87
+ ### Common Issues
88
+
89
+ 1. **Authentication Error**: Ensure your API key and project ID are correct
90
+ 2. **Model Not Found**: Check if the model is available in your WatsonX project
91
+ 3. **Rate Limit Exceeded**: Wait and retry, or upgrade your subscription
92
+
93
+ ### Debug Mode
94
+
95
+ Enable debug logging to see API requests:
96
+
97
+ ```bash
98
+ janito -p ibm --verbose "Your prompt here"
99
+ ```
@@ -0,0 +1 @@
1
+ # IBM WatsonX AI Provider
@@ -0,0 +1,87 @@
"""IBM WatsonX AI model specifications.

Catalog of WatsonX-hosted chat models exposed by the IBM provider.
Every entry in this catalog currently advertises the same limits
(128K context / 128K max input / 4K response / 4K chain-of-thought);
only the name and thinking support vary, so entries are built through
a small factory helper to avoid copy-paste drift.
"""

from janito.llm.model import LLMModelInfo


def _watsonx_spec(name, thinking_supported=False):
    """Return an LLMModelInfo for a WatsonX-hosted model.

    Args:
        name: Fully-qualified model identifier (e.g. "ibm/granite-3-8b-instruct").
        thinking_supported: Whether the model exposes a reasoning/thinking mode.
    """
    return LLMModelInfo(
        name=name,
        context=128000,
        max_input=128000,
        max_response=4096,
        max_cot=4096,
        thinking_supported=thinking_supported,
        category="IBM WatsonX",
    )


# NOTE(review): the original literal listed "openai/gpt-oss-120b" twice with
# identical specs; in a dict literal the later entry silently overwrites the
# earlier one, so the duplicate has been removed here.
MODEL_SPECS = {
    "ibm/granite-3-8b-instruct": _watsonx_spec("ibm/granite-3-8b-instruct"),
    "ibm/granite-3-3-8b-instruct": _watsonx_spec("ibm/granite-3-3-8b-instruct"),
    "meta-llama/llama-3-1-70b-instruct": _watsonx_spec(
        "meta-llama/llama-3-1-70b-instruct"
    ),
    "meta-llama/llama-3-3-70b-instruct": _watsonx_spec(
        "meta-llama/llama-3-3-70b-instruct"
    ),
    "mistralai/mistral-large": _watsonx_spec("mistralai/mistral-large"),
    "mistralai/mistral-large-2407": _watsonx_spec("mistralai/mistral-large-2407"),
    "openai/gpt-oss-120b": _watsonx_spec(
        "openai/gpt-oss-120b", thinking_supported=True
    ),
    "openai/gpt-oss-20b": _watsonx_spec(
        "openai/gpt-oss-20b", thinking_supported=True
    ),
}