terminal-sherpa 0.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (30)
  1. terminal_sherpa-0.4.0/LICENSE +21 -0
  2. terminal_sherpa-0.4.0/PKG-INFO +278 -0
  3. terminal_sherpa-0.4.0/README.md +245 -0
  4. terminal_sherpa-0.4.0/ask/__init__.py +0 -0
  5. terminal_sherpa-0.4.0/ask/config.py +94 -0
  6. terminal_sherpa-0.4.0/ask/exceptions.py +25 -0
  7. terminal_sherpa-0.4.0/ask/main.py +116 -0
  8. terminal_sherpa-0.4.0/ask/providers/__init__.py +39 -0
  9. terminal_sherpa-0.4.0/ask/providers/anthropic.py +74 -0
  10. terminal_sherpa-0.4.0/ask/providers/base.py +28 -0
  11. terminal_sherpa-0.4.0/ask/providers/gemini.py +89 -0
  12. terminal_sherpa-0.4.0/ask/providers/grok.py +118 -0
  13. terminal_sherpa-0.4.0/ask/providers/openai.py +107 -0
  14. terminal_sherpa-0.4.0/pyproject.toml +111 -0
  15. terminal_sherpa-0.4.0/setup.cfg +4 -0
  16. terminal_sherpa-0.4.0/terminal_sherpa.egg-info/PKG-INFO +278 -0
  17. terminal_sherpa-0.4.0/terminal_sherpa.egg-info/SOURCES.txt +28 -0
  18. terminal_sherpa-0.4.0/terminal_sherpa.egg-info/dependency_links.txt +1 -0
  19. terminal_sherpa-0.4.0/terminal_sherpa.egg-info/entry_points.txt +2 -0
  20. terminal_sherpa-0.4.0/terminal_sherpa.egg-info/requires.txt +7 -0
  21. terminal_sherpa-0.4.0/terminal_sherpa.egg-info/top_level.txt +5 -0
  22. terminal_sherpa-0.4.0/test/conftest.py +72 -0
  23. terminal_sherpa-0.4.0/test/test_anthropic.py +173 -0
  24. terminal_sherpa-0.4.0/test/test_config.py +164 -0
  25. terminal_sherpa-0.4.0/test/test_exceptions.py +55 -0
  26. terminal_sherpa-0.4.0/test/test_gemini.py +244 -0
  27. terminal_sherpa-0.4.0/test/test_grok.py +247 -0
  28. terminal_sherpa-0.4.0/test/test_main.py +206 -0
  29. terminal_sherpa-0.4.0/test/test_openai.py +261 -0
  30. terminal_sherpa-0.4.0/test/test_providers.py +77 -0
@@ -0,0 +1,21 @@
1
+ MIT License
2
+
3
+ Copyright (c) 2025 Lucas Ford
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
@@ -0,0 +1,278 @@
1
+ Metadata-Version: 2.4
2
+ Name: terminal-sherpa
3
+ Version: 0.4.0
4
+ Summary: AI-powered bash command generator
5
+ Project-URL: Homepage, https://github.com/lcford2/terminal-sherpa
6
+ Project-URL: Issues, https://github.com/lcford2/terminal-sherpa/issues
7
+ Classifier: Development Status :: 4 - Beta
8
+ Classifier: Environment :: Console
9
+ Classifier: Intended Audience :: Developers
10
+ Classifier: Operating System :: OS Independent
11
+ Classifier: License :: OSI Approved :: MIT License
12
+ Classifier: Programming Language :: Python
13
+ Classifier: Programming Language :: Python :: 3.8
14
+ Classifier: Programming Language :: Python :: 3.9
15
+ Classifier: Programming Language :: Python :: 3.10
16
+ Classifier: Programming Language :: Python :: 3.11
17
+ Classifier: Programming Language :: Python :: 3.12
18
+ Classifier: Programming Language :: Python :: 3.13
19
+ Classifier: Programming Language :: Python :: 3 :: Only
20
+ Classifier: Topic :: Utilities
21
+ Classifier: Topic :: Software Development :: Libraries
22
+ Requires-Python: >=3.10
23
+ Description-Content-Type: text/markdown
24
+ License-File: LICENSE
25
+ Requires-Dist: anthropic>=0.7.0
26
+ Requires-Dist: google-genai>=1.26.0
27
+ Requires-Dist: loguru>=0.7.0
28
+ Requires-Dist: openai>=1.0.0
29
+ Requires-Dist: setuptools>=80.9.0
30
+ Requires-Dist: toml>=0.10.0
31
+ Requires-Dist: xai-sdk
32
+ Dynamic: license-file
33
+
34
+ # terminal-sherpa
35
+
36
+ A lightweight AI chat interface for fellow terminal dwellers.
37
+
38
+ Turn natural language into bash commands instantly.
39
+ Stop googling syntax and start asking.
40
+
41
+ [![PyPI - Version](https://img.shields.io/pypi/v/terminal-sherpa)](https://pypi.python.org/pypi/terminal-sherpa)
42
+ [![GitHub License](https://img.shields.io/github/license/lcford2/terminal-sherpa)](https://github.com/lcford2/terminal-sherpa/blob/main/LICENSE)
43
+ [![Python Versions](https://img.shields.io/pypi/pyversions/terminal-sherpa)](https://pypi.python.org/pypi/terminal-sherpa)
44
+ [![Actions status](https://github.com/lcford2/terminal-sherpa/actions/workflows/main.yml/badge.svg)](https://github.com/lcford2/terminal-sherpa/actions)
45
+ [![codecov](https://codecov.io/github/lcford2/terminal-sherpa/graph/badge.svg?token=2MXHNL3RHE)](https://codecov.io/github/lcford2/terminal-sherpa)
46
+
47
+ ## 🚀 Getting Started
48
+
49
+ Get up and running:
50
+
51
+ ```bash
52
+ # Install terminal-sherpa
53
+ pip install terminal-sherpa # installs the `ask` CLI tool
54
+
55
+ # Set your API key
56
+ export ANTHROPIC_API_KEY="your-key-here"
57
+
58
+ # Try it out
59
+ ask "find all .py files modified in the last week"
60
+ ```
61
+
62
+ **Example output:**
63
+
64
+ ```bash
65
+ find . -name "*.py" -mtime -7
66
+ ```
67
+
68
+ ## ✨ Features
69
+
70
+ - **Natural language to bash conversion** - Describe what you want, get the command
71
+ - **Multiple AI provider support** - Choose between Anthropic (Claude), OpenAI (GPT), Google (Gemini), and xAI (Grok) models
72
+ - **Flexible configuration system** - Set defaults, customize models, and manage API keys
73
+ - **XDG-compliant config files** - Follows standard configuration file locations
74
+ - **Verbose logging support** - Debug and understand what's happening under the hood
75
+
76
+ ## 📦 Installation
77
+
78
+ ### Requirements
79
+
80
+ - Python 3.10+
81
+ - API key for Anthropic, OpenAI, Google, or xAI
82
+
83
+ ### Install Methods
84
+
85
+ **Using pip:**
86
+
87
+ ```bash
88
+ pip install terminal-sherpa
89
+ ```
90
+
91
+ **From source:**
92
+
93
+ ```bash
94
+ git clone https://github.com/lcford2/terminal-sherpa.git
95
+ cd terminal-sherpa
96
+ uv sync
97
+ uv run ask "your prompt here"
98
+ ```
99
+
100
+ **Verify installation:**
101
+
102
+ ```bash
103
+ ask --help
104
+ ```
105
+
106
+ ## 💡 Usage
107
+
108
+ ### Basic Syntax
109
+
110
+ ```bash
111
+ ask "your natural language prompt"
112
+ ```
113
+
114
+ ### Command Options
115
+
116
+ | Option | Description | Example |
117
+ | ------------------------ | -------------------------- | ------------------------------------------- |
118
+ | `--model provider:model` | Specify provider and model | `ask --model anthropic "list files"` |
119
+ | | | `ask --model anthropic:sonnet "list files"` |
120
+ | | | `ask --model openai "list files"` |
121
+ | | | `ask --model gemini "list files"` |
122
+ | | | `ask --model gemini:pro "list files"` |
123
+ | | | `ask --model grok "list files"` |
124
+ | `--verbose` | Enable verbose logging | `ask --verbose "compress this folder"` |
125
+
126
+ ### Practical Examples
127
+
128
+ **File Operations:**
129
+
130
+ ```bash
131
+ ask "find all files larger than 100MB"
132
+ # Example output: find . -size +100M
133
+
134
+ ask "create a backup of config.txt with timestamp"
135
+ # Example output: cp config.txt config.txt.$(date +%Y%m%d_%H%M%S)
136
+ ```
137
+
138
+ **Git Commands:**
139
+
140
+ ```bash
141
+ ask "show git log for last 5 commits with one line each"
142
+ # Example output: git log --oneline -5
143
+
144
+ ask "delete all local branches that have been merged"
145
+ # Example output: git branch --merged | grep -v "\*\|main\|master" | xargs -n 1 git branch -d
146
+ ```
147
+
148
+ **System Administration:**
149
+
150
+ ```bash
151
+ ask "check disk usage of current directory sorted by size"
152
+ # Example output: du -sh * | sort -hr
153
+
154
+ ask "find processes using port 8080"
155
+ # Example output: lsof -i :8080
156
+ ```
157
+
158
+ **Text Processing:**
159
+
160
+ ```bash
161
+ ask "count lines in all Python files"
162
+ # Example output: find . -name "*.py" -exec wc -l {} + | tail -1
163
+
164
+ ask "replace all tabs with spaces in file.txt"
165
+ # Example output: sed -i 's/\t/ /g' file.txt
166
+ ```
167
+
168
+ **Network Operations:**
169
+
170
+ ```bash
171
+ ask "download file from URL and save to downloads folder"
172
+ # Example output: curl -o ~/Downloads/filename "https://example.com/file"
173
+
174
+ ask "check if port 443 is open on example.com"
175
+ # Example output: nc -zv example.com 443
176
+ ```
177
+
178
+ ## ⚙️ Configuration
179
+
180
+ ### Configuration File Locations
181
+
182
+ Ask follows the XDG Base Directory Specification:
183
+
184
+ 1. `$XDG_CONFIG_HOME/ask/config.toml`
185
+ 1. `~/.config/ask/config.toml` (if XDG_CONFIG_HOME not set)
186
+ 1. `~/.ask/config.toml` (fallback)
187
+
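+ For example, to create the primary location by hand (a quick sketch, assuming `XDG_CONFIG_HOME` is unset):
+
+ ```bash
+ # Create the directory ask checks first, then edit the config file
+ mkdir -p ~/.config/ask
+ "${EDITOR:-vi}" ~/.config/ask/config.toml
+ ```
+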
188
+ ### Environment Variables
189
+
190
+ ```bash
191
+ export ANTHROPIC_API_KEY="your-anthropic-key"
192
+ export OPENAI_API_KEY="your-openai-key"
193
+ export GEMINI_API_KEY="your-gemini-key"
194
+ export XAI_API_KEY="your-xai-key"
195
+ ```
196
+
197
+ ### Example Configuration File
198
+
199
+ Create `~/.config/ask/config.toml`:
200
+
201
+ ```toml
202
+ [ask]
203
+ default_model = "anthropic"
204
+
205
+ [anthropic]
206
+ model = "claude-3-haiku-20240307"
207
+ max_tokens = 512
208
+
209
+ [anthropic.sonnet]
210
+ model = "claude-sonnet-4-20250514"
211
+ max_tokens = 1024
212
+
213
+ [openai]
214
+ model = "gpt-4o"
215
+ max_tokens = 1024
216
+
217
+ [gemini]
218
+ model = "gemini-2.5-flash-lite-preview-06-17"
219
+ max_tokens = 150
220
+
221
+ [gemini.pro]
222
+ model = "gemini-2.5-pro"
223
+ max_tokens = 1024
224
+
225
+ [grok]
226
+ model = "grok-3-fast"
227
+ max_tokens = 150
228
+ temperature = 0.0
229
+ ```
230
+
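+ With a config like the one above, the nested sections map onto the `--model provider:model` flag (a sketch, assuming the sample `config.toml` shown here):
+
+ ```bash
+ # Uses the [ask] default_model, i.e. the base [anthropic] section
+ ask "list files"
+
+ # Uses the nested [anthropic.sonnet] section (claude-sonnet-4, 1024 max tokens)
+ ask --model anthropic:sonnet "list files"
+
+ # Uses the nested [gemini.pro] section
+ ask --model gemini:pro "list files"
+ ```
+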
231
+ ## 🤖 Supported Providers
232
+
233
+ - Anthropic (Claude)
234
+ - OpenAI (GPT)
235
+ - Google (Gemini)
236
+ - xAI (Grok)
237
+
238
+ > **Note:** Get API keys from [Anthropic Console](https://console.anthropic.com/), [OpenAI Platform](https://platform.openai.com/), [Google AI Studio](https://aistudio.google.com/), or [xAI Console](https://x.ai/console)
239
+
240
+ ## 🛣️ Roadmap
241
+
242
+ - [ ] Shell integration and auto-completion
243
+ - [ ] Additional providers (Cohere, Mistral)
244
+ - [ ] Local model support (Ollama, llama.cpp)
245
+
246
+ ## 🔧 Development
247
+
248
+ ### Setup
249
+
250
+ ```bash
251
+ git clone https://github.com/lcford2/terminal-sherpa.git
252
+ cd terminal-sherpa
253
+ uv sync --all-groups
254
+ uv run pre-commit install
255
+ ```
256
+
257
+ ### Testing
258
+
259
+ ```bash
260
+ uv run python -m pytest
261
+ ```
262
+
263
+ ### Contributing
264
+
265
+ 1. Fork the repository
266
+ 2. Create a feature branch
267
+ 3. Make your changes
268
+ 4. Run pre-commit checks: `uv run pre-commit run --all-files`
269
+ 5. Run tests: `uv run task test`
270
+ 6. Submit a pull request
271
+
272
+ ## License
273
+
274
+ This project is licensed under the MIT License - see the LICENSE file for details.
275
+
276
+ ## Issues
277
+
278
+ Found a bug or have a feature request? Please open an issue on [GitHub Issues](https://github.com/lcford2/terminal-sherpa/issues).
@@ -0,0 +1,245 @@
1
+ # terminal-sherpa
2
+
3
+ A lightweight AI chat interface for fellow terminal dwellers.
4
+
5
+ Turn natural language into bash commands instantly.
6
+ Stop googling syntax and start asking.
7
+
8
+ [![PyPI - Version](https://img.shields.io/pypi/v/terminal-sherpa)](https://pypi.python.org/pypi/terminal-sherpa)
9
+ [![GitHub License](https://img.shields.io/github/license/lcford2/terminal-sherpa)](https://github.com/lcford2/terminal-sherpa/blob/main/LICENSE)
10
+ [![Python Versions](https://img.shields.io/pypi/pyversions/terminal-sherpa)](https://pypi.python.org/pypi/terminal-sherpa)
11
+ [![Actions status](https://github.com/lcford2/terminal-sherpa/actions/workflows/main.yml/badge.svg)](https://github.com/lcford2/terminal-sherpa/actions)
12
+ [![codecov](https://codecov.io/github/lcford2/terminal-sherpa/graph/badge.svg?token=2MXHNL3RHE)](https://codecov.io/github/lcford2/terminal-sherpa)
13
+
14
+ ## 🚀 Getting Started
15
+
16
+ Get up and running:
17
+
18
+ ```bash
19
+ # Install terminal-sherpa
20
+ pip install terminal-sherpa # installs the `ask` CLI tool
21
+
22
+ # Set your API key
23
+ export ANTHROPIC_API_KEY="your-key-here"
24
+
25
+ # Try it out
26
+ ask "find all .py files modified in the last week"
27
+ ```
28
+
29
+ **Example output:**
30
+
31
+ ```bash
32
+ find . -name "*.py" -mtime -7
33
+ ```
34
+
35
+ ## ✨ Features
36
+
37
+ - **Natural language to bash conversion** - Describe what you want, get the command
38
+ - **Multiple AI provider support** - Choose between Anthropic (Claude), OpenAI (GPT), Google (Gemini), and xAI (Grok) models
39
+ - **Flexible configuration system** - Set defaults, customize models, and manage API keys
40
+ - **XDG-compliant config files** - Follows standard configuration file locations
41
+ - **Verbose logging support** - Debug and understand what's happening under the hood
42
+
43
+ ## 📦 Installation
44
+
45
+ ### Requirements
46
+
47
+ - Python 3.10+
48
+ - API key for Anthropic, OpenAI, Google, or xAI
49
+
50
+ ### Install Methods
51
+
52
+ **Using pip:**
53
+
54
+ ```bash
55
+ pip install terminal-sherpa
56
+ ```
57
+
58
+ **From source:**
59
+
60
+ ```bash
61
+ git clone https://github.com/lcford2/terminal-sherpa.git
62
+ cd terminal-sherpa
63
+ uv sync
64
+ uv run ask "your prompt here"
65
+ ```
66
+
67
+ **Verify installation:**
68
+
69
+ ```bash
70
+ ask --help
71
+ ```
72
+
73
+ ## 💡 Usage
74
+
75
+ ### Basic Syntax
76
+
77
+ ```bash
78
+ ask "your natural language prompt"
79
+ ```
80
+
81
+ ### Command Options
82
+
83
+ | Option | Description | Example |
84
+ | ------------------------ | -------------------------- | ------------------------------------------- |
85
+ | `--model provider:model` | Specify provider and model | `ask --model anthropic "list files"` |
86
+ | | | `ask --model anthropic:sonnet "list files"` |
87
+ | | | `ask --model openai "list files"` |
88
+ | | | `ask --model gemini "list files"` |
89
+ | | | `ask --model gemini:pro "list files"` |
90
+ | | | `ask --model grok "list files"` |
91
+ | `--verbose` | Enable verbose logging | `ask --verbose "compress this folder"` |
92
+
93
+ ### Practical Examples
94
+
95
+ **File Operations:**
96
+
97
+ ```bash
98
+ ask "find all files larger than 100MB"
99
+ # Example output: find . -size +100M
100
+
101
+ ask "create a backup of config.txt with timestamp"
102
+ # Example output: cp config.txt config.txt.$(date +%Y%m%d_%H%M%S)
103
+ ```
104
+
105
+ **Git Commands:**
106
+
107
+ ```bash
108
+ ask "show git log for last 5 commits with one line each"
109
+ # Example output: git log --oneline -5
110
+
111
+ ask "delete all local branches that have been merged"
112
+ # Example output: git branch --merged | grep -v "\*\|main\|master" | xargs -n 1 git branch -d
113
+ ```
114
+
115
+ **System Administration:**
116
+
117
+ ```bash
118
+ ask "check disk usage of current directory sorted by size"
119
+ # Example output: du -sh * | sort -hr
120
+
121
+ ask "find processes using port 8080"
122
+ # Example output: lsof -i :8080
123
+ ```
124
+
125
+ **Text Processing:**
126
+
127
+ ```bash
128
+ ask "count lines in all Python files"
129
+ # Example output: find . -name "*.py" -exec wc -l {} + | tail -1
130
+
131
+ ask "replace all tabs with spaces in file.txt"
132
+ # Example output: sed -i 's/\t/ /g' file.txt
133
+ ```
134
+
135
+ **Network Operations:**
136
+
137
+ ```bash
138
+ ask "download file from URL and save to downloads folder"
139
+ # Example output: curl -o ~/Downloads/filename "https://example.com/file"
140
+
141
+ ask "check if port 443 is open on example.com"
142
+ # Example output: nc -zv example.com 443
143
+ ```
144
+
145
+ ## ⚙️ Configuration
146
+
147
+ ### Configuration File Locations
148
+
149
+ Ask follows the XDG Base Directory Specification:
150
+
151
+ 1. `$XDG_CONFIG_HOME/ask/config.toml`
152
+ 1. `~/.config/ask/config.toml` (if XDG_CONFIG_HOME not set)
153
+ 1. `~/.ask/config.toml` (fallback)
154
+
155
+ ### Environment Variables
156
+
157
+ ```bash
158
+ export ANTHROPIC_API_KEY="your-anthropic-key"
159
+ export OPENAI_API_KEY="your-openai-key"
160
+ export GEMINI_API_KEY="your-gemini-key"
161
+ export XAI_API_KEY="your-xai-key"
162
+ ```
163
+
164
+ ### Example Configuration File
165
+
166
+ Create `~/.config/ask/config.toml`:
167
+
168
+ ```toml
169
+ [ask]
170
+ default_model = "anthropic"
171
+
172
+ [anthropic]
173
+ model = "claude-3-haiku-20240307"
174
+ max_tokens = 512
175
+
176
+ [anthropic.sonnet]
177
+ model = "claude-sonnet-4-20250514"
178
+ max_tokens = 1024
179
+
180
+ [openai]
181
+ model = "gpt-4o"
182
+ max_tokens = 1024
183
+
184
+ [gemini]
185
+ model = "gemini-2.5-flash-lite-preview-06-17"
186
+ max_tokens = 150
187
+
188
+ [gemini.pro]
189
+ model = "gemini-2.5-pro"
190
+ max_tokens = 1024
191
+
192
+ [grok]
193
+ model = "grok-3-fast"
194
+ max_tokens = 150
195
+ temperature = 0.0
196
+ ```
197
+
198
+ ## 🤖 Supported Providers
199
+
200
+ - Anthropic (Claude)
201
+ - OpenAI (GPT)
202
+ - Google (Gemini)
203
+ - xAI (Grok)
204
+
205
+ > **Note:** Get API keys from [Anthropic Console](https://console.anthropic.com/), [OpenAI Platform](https://platform.openai.com/), [Google AI Studio](https://aistudio.google.com/), or [xAI Console](https://x.ai/console)
206
+
207
+ ## 🛣️ Roadmap
208
+
209
+ - [ ] Shell integration and auto-completion
210
+ - [ ] Additional providers (Cohere, Mistral)
211
+ - [ ] Local model support (Ollama, llama.cpp)
212
+
213
+ ## 🔧 Development
214
+
215
+ ### Setup
216
+
217
+ ```bash
218
+ git clone https://github.com/lcford2/terminal-sherpa.git
219
+ cd terminal-sherpa
220
+ uv sync --all-groups
221
+ uv run pre-commit install
222
+ ```
223
+
224
+ ### Testing
225
+
226
+ ```bash
227
+ uv run python -m pytest
228
+ ```
229
+
230
+ ### Contributing
231
+
232
+ 1. Fork the repository
233
+ 2. Create a feature branch
234
+ 3. Make your changes
235
+ 4. Run pre-commit checks: `uv run pre-commit run --all-files`
236
+ 5. Run tests: `uv run task test`
237
+ 6. Submit a pull request
238
+
239
+ ## License
240
+
241
+ This project is licensed under the MIT License - see the LICENSE file for details.
242
+
243
+ ## Issues
244
+
245
+ Found a bug or have a feature request? Please open an issue on [GitHub Issues](https://github.com/lcford2/terminal-sherpa/issues).
File without changes
@@ -0,0 +1,94 @@
1
+ """Configuration loading and management module."""
2
+
3
+ import os
4
+ from pathlib import Path
5
+ from typing import Any
6
+
7
+ import toml
8
+
9
+ from ask.exceptions import ConfigurationError
10
+
11
+ SYSTEM_PROMPT = (
12
+ "You are a bash command generator. Given a user request, "
13
+ "respond with ONLY the bash command that accomplishes the task. "
14
+ "Do not include explanations, comments, or any other text. "
15
+ "Just the command.",
16
+ )
17
+
18
+
19
+ def get_config_path() -> Path | None:
20
+ """Find config file using XDG standard."""
21
+ # Primary location: $XDG_CONFIG_HOME/ask/config.toml
22
+ xdg_config_home = os.environ.get("XDG_CONFIG_HOME")
23
+ if xdg_config_home:
24
+ primary_path = Path(xdg_config_home) / "ask" / "config.toml"
25
+ else:
26
+ primary_path = Path.home() / ".config" / "ask" / "config.toml"
27
+
28
+ if primary_path.exists():
29
+ return primary_path
30
+
31
+ # Fallback location: ~/.ask/config.toml
32
+ fallback_path = Path.home() / ".ask" / "config.toml"
33
+ if fallback_path.exists():
34
+ return fallback_path
35
+
36
+ return None
37
+
38
+
39
+ def load_config() -> dict[str, Any]:
40
+ """Load configuration from TOML file."""
41
+ config_path = get_config_path()
42
+
43
+ if config_path is None:
44
+ return {}
45
+
46
+ try:
47
+ with open(config_path) as f:
48
+ return toml.load(f)
49
+ except Exception as e:
50
+ raise ConfigurationError(f"Failed to load config file {config_path}: {e}")
51
+
52
+
53
+ def get_provider_config(
54
+ config: dict[str, Any], provider_spec: str
55
+ ) -> tuple[str, dict[str, Any]]:
56
+ """Parse provider:model syntax and return provider name and config."""
57
+ if ":" in provider_spec:
58
+ provider_name, model_name = provider_spec.split(":", 1)
59
+
60
+ # First try to get nested config (e.g., anthropic.haiku)
61
+ provider_section = config.get(provider_name, {})
62
+ if isinstance(provider_section, dict) and model_name in provider_section:
63
+ provider_config = provider_section[model_name]
64
+ else:
65
+ # Fall back to base provider config
66
+ provider_config = provider_section
67
+ else:
68
+ provider_name = provider_spec
69
+ provider_config = config.get(provider_name, {})
70
+
71
+ # Add global settings
72
+ global_config = config.get("ask", {})
73
+
74
+ # Merge global and provider-specific config
75
+ merged_config = {**global_config, **provider_config}
76
+
77
+ return provider_name, merged_config
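+
+ # Example (a sketch, assuming the sample config.toml from the README):
+ #   get_provider_config(config, "anthropic:sonnet")
+ #   -> ("anthropic", {"default_model": "anthropic",
+ #                     "model": "claude-sonnet-4-20250514",
+ #                     "max_tokens": 1024})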
78
+
79
+
80
+ def get_default_model(config: dict[str, Any]) -> str | None:
81
+ """Get default model from configuration."""
82
+ global_config = config.get("ask", {})
83
+ return global_config.get("default_model")
84
+
85
+
86
+ def get_default_provider() -> str | None:
87
+ """Determine fallback provider from environment variables."""
88
+ # Check for API keys in order of preference: anthropic -> openai
89
+ if os.environ.get("ANTHROPIC_API_KEY"):
90
+ return "anthropic"
91
+ elif os.environ.get("OPENAI_API_KEY"):
92
+ return "openai"
93
+
94
+ return None
@@ -0,0 +1,25 @@
1
+ """Custom exception classes for the ask CLI tool."""
2
+
3
+
4
+ class ConfigurationError(Exception):
5
+ """Raised when there are configuration-related errors."""
6
+
7
+ pass
8
+
9
+
10
+ class AuthenticationError(Exception):
11
+ """Raised when authentication fails."""
12
+
13
+ pass
14
+
15
+
16
+ class APIError(Exception):
17
+ """Raised when API requests fail."""
18
+
19
+ pass
20
+
21
+
22
+ class RateLimitError(APIError):
23
+ """Raised when API rate limits are exceeded."""
24
+
25
+ pass