moth-ai 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/README.md +319 -0
- package/dist/agent/orchestrator.js +141 -0
- package/dist/agent/types.js +1 -0
- package/dist/config/configManager.js +62 -0
- package/dist/config/keychain.js +20 -0
- package/dist/context/ignore.js +27 -0
- package/dist/context/manager.js +62 -0
- package/dist/context/scanner.js +41 -0
- package/dist/context/types.js +1 -0
- package/dist/editing/patcher.js +37 -0
- package/dist/index.js +401 -0
- package/dist/llm/claudeAdapter.js +47 -0
- package/dist/llm/cohereAdapter.js +42 -0
- package/dist/llm/factory.js +30 -0
- package/dist/llm/geminiAdapter.js +55 -0
- package/dist/llm/openAIAdapter.js +45 -0
- package/dist/llm/types.js +1 -0
- package/dist/planning/todoManager.js +23 -0
- package/dist/tools/definitions.js +187 -0
- package/dist/tools/factory.js +196 -0
- package/dist/tools/registry.js +21 -0
- package/dist/tools/types.js +1 -0
- package/dist/ui/App.js +387 -0
- package/dist/ui/ProfileManager.js +51 -0
- package/dist/ui/components/CommandPalette.js +29 -0
- package/dist/ui/components/CustomTextInput.js +75 -0
- package/dist/ui/components/FileAutocomplete.js +16 -0
- package/dist/ui/components/FileChip.js +8 -0
- package/dist/ui/components/FlameLogo.js +40 -0
- package/dist/ui/components/WordFlame.js +10 -0
- package/dist/ui/components/WordMoth.js +10 -0
- package/dist/ui/wizards/LLMRemover.js +68 -0
- package/dist/ui/wizards/LLMWizard.js +149 -0
- package/dist/utils/fileUtils.js +67 -0
- package/dist/utils/paths.js +22 -0
- package/dist/utils/text.js +49 -0
- package/docs/architecture.md +63 -0
- package/docs/core_logic.md +53 -0
- package/docs/index.md +30 -0
- package/docs/llm_integration.md +49 -0
- package/docs/ui_components.md +44 -0
- package/package.json +70 -0
package/README.md
ADDED
@@ -0,0 +1,319 @@
# 🦋 Moth AI

[](https://www.npmjs.com/package/@kishlay42/moth-ai)
[](https://opensource.org/licenses/ISC)
[](https://nodejs.org)

**The Intelligent, Local-First CLI Coding Assistant**

Moth AI is a powerful **terminal-native coding assistant** built for developers who value **privacy, speed, and control**. It lives inside your terminal, understands your project context, and helps you **write, debug, refactor, and reason about code** using both **local and cloud LLMs**.

<img width="1095" height="504" alt="Moth AI Screenshot" src="https://github.com/user-attachments/assets/23b83a6b-2b63-45af-b9ec-a6dcb0a89b2f" />

---

## 📦 Installation

### Global Installation (Recommended)

Install Moth AI globally to use it anywhere on your system:

```bash
npm install -g @kishlay42/moth-ai
```

After installation, simply run:

```bash
moth
```

### Local Installation

Install in a specific project:

```bash
npm install @kishlay42/moth-ai
```

Run using npx:

```bash
npx moth
```

### Requirements

- **Node.js**: >= 18.0.0
- **npm**: >= 8.0.0

---

## 🚀 Quick Start

1. **Install Moth AI globally:**
   ```bash
   npm install -g @kishlay42/moth-ai
   ```

2. **Add your first LLM profile:**
   ```bash
   moth llm add
   ```

   Choose from:
   - **Local models** (via Ollama) - Free, private, offline
   - **Cloud providers** (OpenAI, Anthropic, Google) - Requires API key

3. **Start chatting:**
   ```bash
   moth
   ```

4. **Use the command palette:**
   - Press `Ctrl+U` to access all commands
   - Switch profiles, toggle autopilot, and more

---

## ✨ Key Features

### 🧠 LLM-Agnostic & Local-First

Use **any LLM**, local or cloud — switch instantly without changing workflows.

- **Local (via Ollama)**
  Run models like **Llama 3**, **Mistral**, **Gemma**, and **DeepSeek-Coder** locally
  → Zero latency, full privacy, offline-friendly

- **Cloud Providers**
  Plug in your own API keys for:
  - OpenAI (GPT-4 / GPT-4o)
  - Anthropic (Claude 3.5 Sonnet)
  - Google (Gemini)

<img width="1093" height="241" alt="LLM Switching" src="https://github.com/user-attachments/assets/2de67c9d-f562-4ce3-8bc6-51e2066b69ae" />

---

### 🤖 Agentic Capabilities

Moth is not just a chatbot — it's an **AI agent**.

- **Task Planning** – Break complex goals into executable steps
- **File Editing** – Precise diffs, patches, and refactors
- **Terminal Control** – Run builds, tests, and Git commands from chat
- **Context-Aware** – Understands your project structure and codebase

---

### 🛡️ Permission-First by Design

You stay in control — always.

- Explicit approval before file edits or command execution
- Granular permissions per action
- **Autopilot mode** for trusted workflows
- Feedback loop to guide the agent instead of blind execution

---

### 🎭 Moth Profiles

Save and switch between different AI personalities.

- **Coding Profile** – Optimized for TypeScript / Python
- **Architecture Profile** – Reasoning-focused for system design
- **Fast Profile** – Lightweight local model for quick answers

---

## CLI Commands

### Main Commands

```bash
# Start interactive chat
moth

# Show help
moth --help

# Display version
moth --version
```

### LLM Profile Management

```bash
# Add a new LLM profile
moth llm add

# List all configured profiles
moth llm list

# Switch to a different profile
moth llm use

# Remove a profile
moth llm remove
```

### Keyboard Shortcuts

- **Ctrl+U** - Open command palette
- **Ctrl+C** - Exit chat
- **Arrow Keys** - Navigate command palette
- **Enter** - Execute selected command

---

## ⚙️ Configuration

Moth AI stores configuration in `~/.moth/config.yaml`.

### Example Configuration

```yaml
profiles:
  - name: "gpt-4"
    provider: "openai"
    model: "gpt-4"
    apiKey: "sk-..."

  - name: "local-llama"
    provider: "ollama"
    model: "llama3"
    baseUrl: "http://localhost:11434"

activeProfile: "gpt-4"
```

### Setting Up Ollama (Local Models)

1. Install Ollama: https://ollama.ai
2. Pull a model:
   ```bash
   ollama pull llama3
   ```
3. Add to Moth:
   ```bash
   moth llm add
   # Select "Ollama" and choose your model
   ```

### Setting Up Cloud Providers

#### OpenAI
```bash
moth llm add
# Select "OpenAI"
# Enter your API key from https://platform.openai.com/api-keys
```

#### Anthropic (Claude)
```bash
moth llm add
# Select "Anthropic"
# Enter your API key from https://console.anthropic.com/
```

#### Google (Gemini)
```bash
moth llm add
# Select "Google"
# Enter your API key from https://makersuite.google.com/app/apikey
```

---

## 💡 Usage Examples

### Basic Chat

```bash
moth
> How do I implement a binary search in TypeScript?
```

### Code Refactoring

```bash
moth
> Refactor src/utils.ts to use async/await instead of promises
```

### Debugging

```bash
moth
> Why is my React component re-rendering infinitely?
```

### Project Analysis

```bash
moth
> Analyze the architecture of this project and suggest improvements
```

---

## 🔧 Troubleshooting

### Command not found: moth

Make sure the global npm bin directory is in your PATH:

```bash
npm config get prefix
```

Add the bin directory to your PATH in `~/.bashrc` or `~/.zshrc`:

```bash
export PATH="$PATH:$(npm config get prefix)/bin"
```

### Ollama connection error

Ensure Ollama is running:

```bash
ollama serve
```

### API key errors

Verify your API key is correctly configured:

```bash
moth llm list
# Check if your profile shows the correct provider
```

---

## 📝 License

ISC License - see [LICENSE](LICENSE) file for details

---

## 🤝 Contributing

Contributions are welcome! Please feel free to submit a Pull Request.

Repository: https://github.com/kishlay42/Moth-ai

---

## 📚 Links

- **npm Package**: https://www.npmjs.com/package/@kishlay42/moth-ai
- **GitHub**: https://github.com/kishlay42/Moth-ai
- **Issues**: https://github.com/kishlay42/Moth-ai/issues

---

**Made with ❤️ for developers who code in the terminal**
package/dist/agent/orchestrator.js
ADDED
@@ -0,0 +1,141 @@
import { formatFileForContext, truncateFileContent } from '../utils/fileUtils.js';
import * as fs from 'fs/promises';
import * as path from 'path';
export class AgentOrchestrator {
    config;
    state;
    tools;
    root;
    constructor(config, registry, root = process.cwd()) {
        this.config = config;
        this.tools = registry;
        this.root = root;
        this.state = {
            history: [],
            maxSteps: config.maxSteps || 10
        };
    }
    async *run(prompt, history = []) {
        let currentPrompt = prompt;
        for (let i = 0; i < this.state.maxSteps; i++) {
            // Collect all attached files from history
            const allAttachedFiles = this.collectAttachedFiles(history);
            // Construct system prompt with tools and file context
            const systemPrompt = await this.buildSystemPrompt(allAttachedFiles);
            // Full context: System -> History -> Current State
            const messages = [
                // Many APIs treat the system prompt specially; the user role is a generic
                // fallback until the LLM client handles system messages natively.
                { role: 'user', content: systemPrompt },
                ...history,
                { role: 'user', content: currentPrompt }
            ];
            // Simplified ReAct loop: the raw LLM output is expected to carry the "thought" and
            // optional "toolCall" as strict JSON. Providers with JSON mode (e.g. Gemini) or
            // structured output make this parsing reliable.
            const responseText = await this.callLLM(messages);
            // Parse response
            let step;
            try {
                step = this.parseResponse(responseText);
            }
            catch (e) {
                yield { thought: "Failed to parse LLM response. Retrying...", toolOutput: `Error: ${e}` };
                continue;
            }
            this.state.history.push(step);
            yield step;
            if (step.finalAnswer) {
                return step.finalAnswer;
            }
            if (step.toolCall) {
                // Execute tool
                const result = await this.executeTool(step.toolCall.name, step.toolCall.arguments);
                step.toolOutput = result;
                // Re-feed result to LLM
                currentPrompt = `Tool '${step.toolCall.name}' returned: ${result}`;
            }
        }
        return "Max steps reached.";
    }
    async buildSystemPrompt(attachedFiles = []) {
        const toolDefs = this.tools.getDefinitions().map(t => `${t.name}: ${t.description} Params: ${JSON.stringify(t.parameters)}`).join('\n');
        let fileContext = '';
        if (attachedFiles.length > 0) {
            fileContext = await this.buildFileContext(attachedFiles);
        }
        return `You are Moth, an intelligent CLI coding assistant.
You have access to the following tools:
${toolDefs}

${fileContext}

IMPORTANT GUIDELINES:
1. For general questions, explanations, or code snippets that don't need to be saved, use "finalAnswer".
2. Do NOT use "write_to_file" unless the user explicitly asks to save a file or implies a persistent change.
3. If the user asks for "Hello World code", just show it in the explanation (finalAnswer). Do NOT create a file for it.
4. Be concise and helpful.
5. If files are referenced above, use that context to answer questions about them.

Format your response exactly as a JSON object:
{
"thought": "your reasoning",
"toolCall": { "name": "tool_name", "arguments": { ... } }
}
OR if you are done/replying:
{
"thought": "reasoning",
"finalAnswer": "your response/code/explanation"
}
`;
    }
    collectAttachedFiles(history) {
        const files = new Set();
        for (const msg of history) {
            if (msg.attachedFiles) {
                msg.attachedFiles.forEach(f => files.add(f));
            }
        }
        return Array.from(files);
    }
    async buildFileContext(filePaths) {
        const fileContents = ['=== Referenced Files ===\n'];
        for (const filePath of filePaths) {
            try {
                const fullPath = path.join(this.root, filePath);
                // Security: Prevent breaking out of root
                if (!fullPath.startsWith(this.root)) {
                    fileContents.push(`File: ${filePath}\nError: Access denied (outside project root)\n`);
                    continue;
                }
                const content = await fs.readFile(fullPath, 'utf-8');
                const truncated = truncateFileContent(content, 500);
                const formatted = formatFileForContext(filePath, truncated);
                fileContents.push(formatted);
            }
            catch (e) {
                fileContents.push(`File: ${filePath}\nError: ${e.message}\n`);
            }
        }
        return fileContents.join('\n');
    }
    async callLLM(messages) {
        // Direct integration with the LLM client: collect the streamed chunks into one string.
        // A non-streaming 'generate' method on the client would avoid this buffering.
        let fullText = "";
        for await (const chunk of this.config.model.chatStream(messages)) {
            fullText += chunk;
        }
        return fullText;
    }
    parseResponse(text) {
        // Clean markdown code blocks if present
        const jsonText = text.replace(/```json/g, '').replace(/```/g, '').trim();
        return JSON.parse(jsonText);
    }
    async executeTool(name, args) {
        return await this.tools.execute(name, args);
    }
}
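The orchestrator exposes `run()` as an async generator, so a caller can render each thought, tool call, and tool output as it happens. Below is a minimal sketch (not part of the package) of driving that loop; the `registry` and `model` objects are hypothetical stand-ins for the real tool registry (`dist/tools/registry.js`) and LLM adapters (`dist/llm/*`), and the import path is illustrative.

```js
import { AgentOrchestrator } from './dist/agent/orchestrator.js'; // path is illustrative

// Hypothetical stubs for illustration only.
const registry = {
    getDefinitions: () => [{ name: 'read_file', description: 'Read a file', parameters: { path: 'string' } }],
    execute: async (name, args) => `stub output of ${name}(${JSON.stringify(args)})`
};
const model = {
    async *chatStream(messages) {
        // A real adapter streams tokens; this stub yields one complete JSON step.
        yield JSON.stringify({ thought: 'No tools needed.', finalAnswer: 'Hello from the stub model.' });
    }
};

const agent = new AgentOrchestrator({ model, maxSteps: 5 }, registry);
for await (const step of agent.run('Say hello')) {
    if (step.thought) console.log('thought:', step.thought);
    if (step.toolOutput) console.log('tool:', step.toolOutput);
    if (step.finalAnswer) console.log('answer:', step.finalAnswer);
}
```

Because the generator yields the parsed step before returning, a UI can show intermediate reasoning and tool output without waiting for the final answer.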
package/dist/agent/types.js
ADDED
@@ -0,0 +1 @@
export {};
package/dist/config/configManager.js
ADDED
@@ -0,0 +1,62 @@
import * as fs from 'fs';
import * as path from 'path';
import yaml from 'js-yaml';
import { CONFIG_DIR, ensureConfigDir } from '../utils/paths.js';
const CONFIG_FILE = path.join(CONFIG_DIR, 'profiles.yaml');
const DEFAULT_CONFIG = {
    profiles: [],
};
export function loadConfig() {
    ensureConfigDir();
    if (!fs.existsSync(CONFIG_FILE)) {
        saveConfig(DEFAULT_CONFIG);
        return DEFAULT_CONFIG;
    }
    try {
        const content = fs.readFileSync(CONFIG_FILE, 'utf8');
        const parsed = yaml.load(content);
        // ensure structure matches
        return {
            profiles: parsed?.profiles || [],
            activeProfile: parsed?.activeProfile,
            username: parsed?.username
        };
    }
    catch (e) {
        // console.error('Failed to load config, using default', e);
        // ^ Silence for now or use UI error in future
        return DEFAULT_CONFIG;
    }
}
export function saveConfig(config) {
    ensureConfigDir();
    fs.writeFileSync(CONFIG_FILE, yaml.dump(config), 'utf8');
}
export function addProfile(config, profile) {
    const existingIndex = config.profiles.findIndex(p => p.name === profile.name);
    if (existingIndex >= 0) {
        config.profiles[existingIndex] = profile;
    }
    else {
        config.profiles.push(profile);
    }
    return config;
}
export function removeProfile(config, name) {
    config.profiles = config.profiles.filter(p => p.name !== name);
    if (config.activeProfile === name) {
        config.activeProfile = undefined;
    }
    return config;
}
export function setActiveProfile(config, name) {
    const exists = config.profiles.some(p => p.name === name);
    if (exists) {
        config.activeProfile = name;
    }
    return config;
}
export function setUsername(config, username) {
    config.username = username;
    return config;
}
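A rough sketch of how these helpers compose; the profile fields shown are inferred from the README's example configuration, and the exact shape is defined by the LLM wizard in `dist/ui/wizards/LLMWizard.js`:

```js
import { loadConfig, addProfile, setActiveProfile, saveConfig } from './dist/config/configManager.js'; // path is illustrative

let config = loadConfig();                        // creates the YAML profiles file under CONFIG_DIR on first run
config = addProfile(config, {                     // upserts by profile name
    name: 'local-llama',
    provider: 'ollama',
    model: 'llama3',
    baseUrl: 'http://localhost:11434'
});
config = setActiveProfile(config, 'local-llama'); // no-op if the name does not exist
saveConfig(config);                               // persists the config back to YAML
```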
package/dist/config/keychain.js
ADDED
@@ -0,0 +1,20 @@
import keytar from 'keytar';
const SERVICE_NAME = 'moth-cli';
const LEGACY_SERVICE_NAME = 'saute-cli';
export async function setApiKey(profileName, key) {
    await keytar.setPassword(SERVICE_NAME, profileName, key);
}
export async function getApiKey(profileName) {
    let key = await keytar.getPassword(SERVICE_NAME, profileName);
    if (!key) {
        // Try legacy service and migrate if found
        key = await keytar.getPassword(LEGACY_SERVICE_NAME, profileName);
        if (key) {
            await keytar.setPassword(SERVICE_NAME, profileName, key);
        }
    }
    return key;
}
export async function deleteApiKey(profileName) {
    return keytar.deletePassword(SERVICE_NAME, profileName);
}
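API keys are stored per profile name in the OS keychain through keytar. A minimal round trip, with an illustrative profile name:

```js
import { setApiKey, getApiKey, deleteApiKey } from './dist/config/keychain.js'; // path is illustrative

await setApiKey('gpt-4', 'sk-...');   // stored under service 'moth-cli', account 'gpt-4'
const key = await getApiKey('gpt-4'); // falls back to the legacy 'saute-cli' service and migrates the entry
await deleteApiKey('gpt-4');          // removes the entry from the 'moth-cli' service
```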
package/dist/context/ignore.js
ADDED
@@ -0,0 +1,27 @@
import ignore from 'ignore';
import * as fs from 'fs';
import * as path from 'path';
export class IgnoreManager {
    ig = ignore();
    constructor(root) {
        this.loadIgnoreFile(root);
        // Always ignore .git and node_modules
        this.ig.add(['.git', 'node_modules', '.moth', 'dist', 'coverage']);
    }
    loadIgnoreFile(root) {
        const ignorePath = path.join(root, '.gitignore');
        if (fs.existsSync(ignorePath)) {
            try {
                const content = fs.readFileSync(ignorePath, 'utf8');
                this.ig.add(content);
            }
            catch (error) {
                console.warn('Failed to load .gitignore:', error);
            }
        }
    }
    shouldIgnore(filePath) {
        // ignore package expects relative paths
        return this.ig.ignores(filePath);
    }
}
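The built-in rules are layered on top of whatever `.gitignore` exists at the project root, and paths passed to `shouldIgnore` must be relative to that root. A short sketch:

```js
import { IgnoreManager } from './dist/context/ignore.js'; // path is illustrative

const ig = new IgnoreManager(process.cwd());
ig.shouldIgnore('node_modules'); // true (built-in rule)
ig.shouldIgnore('src/index.ts'); // false, unless the project's .gitignore says otherwise
```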
package/dist/context/manager.js
ADDED
@@ -0,0 +1,62 @@
import { ProjectScanner } from './scanner.js';
import * as fs from 'fs/promises';
import * as path from 'path';
export class ContextManager {
    scanner;
    root;
    constructor(root) {
        this.root = root;
        this.scanner = new ProjectScanner(root);
    }
    async gather(request) {
        const filePaths = await this.scanner.scan();
        const files = [];
        // Simple Scoring:
        // 1. Exact filename match (1.0)
        // 2. Query terms in path (0.5)
        // 3. Default (0.1)
        // Normalize query terms
        const terms = request.query.toLowerCase().split(/\s+/);
        for (const filePath of filePaths) {
            let score = 0.1;
            const lowerPath = filePath.toLowerCase();
            const basename = path.basename(lowerPath);
            if (terms.some(t => basename === t)) {
                score = 1.0;
            }
            else if (terms.some(t => lowerPath.includes(t))) {
                score = 0.5;
            }
            // Basic Tiering Logic (Placeholder)
            // If score > 0.8 => Full
            // If score > 0.4 => Summary (but we don't have a summarizer yet, so Path)
            // Else => Path
            let tier = 'path';
            let content;
            if (score >= 0.8) {
                tier = 'full';
                try {
                    // Limit file read size for safety
                    content = await fs.readFile(path.join(this.root, filePath), 'utf8');
                    // Truncate if too huge? (TODO)
                }
                catch (e) {
                    console.warn(`Failed to read ${filePath}`, e);
                    tier = 'path';
                }
            }
            files.push({
                path: filePath,
                relevance: score,
                tier,
                content
            });
        }
        // Sort by relevance
        files.sort((a, b) => b.relevance - a.relevance);
        return {
            files,
            totalTokens: 0 // Placeholder
        };
    }
}
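A sketch of a `gather` call, assuming the request shape used above (an object with a `query` string). Files whose basename exactly matches a query term come back with full content; everything else is a path-only entry:

```js
import { ContextManager } from './dist/context/manager.js'; // path is illustrative

const ctx = new ContextManager(process.cwd());
const result = await ctx.gather({ query: 'readme.md' });
for (const file of result.files.slice(0, 5)) {
    console.log(file.relevance.toFixed(1), file.tier, file.path);
}
// e.g. "1.0 full README.md" followed by 0.1 path-only entries for the rest of the project
```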
package/dist/context/scanner.js
ADDED
@@ -0,0 +1,41 @@
import * as fs from 'fs/promises';
import * as path from 'path';
import { IgnoreManager } from './ignore.js';
export class ProjectScanner {
    root;
    ignoreManager;
    constructor(root) {
        this.root = root;
        this.ignoreManager = new IgnoreManager(root);
    }
    async scan() {
        const files = [];
        await this.scanDir('', files);
        return files;
    }
    async scanDir(relativeDir, fileList) {
        const fullDir = path.join(this.root, relativeDir);
        // Check if directory itself is ignored
        if (relativeDir && this.ignoreManager.shouldIgnore(relativeDir)) {
            return;
        }
        try {
            const entries = await fs.readdir(fullDir, { withFileTypes: true });
            for (const entry of entries) {
                const relativePath = path.join(relativeDir, entry.name);
                if (this.ignoreManager.shouldIgnore(relativePath)) {
                    continue;
                }
                if (entry.isDirectory()) {
                    await this.scanDir(relativePath, fileList);
                }
                else if (entry.isFile()) {
                    fileList.push(relativePath);
                }
            }
        }
        catch (error) {
            console.warn(`Failed to scan directory ${fullDir}:`, error);
        }
    }
}
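The scanner walks the tree recursively and returns root-relative paths, skipping anything the IgnoreManager rejects. A short sketch:

```js
import { ProjectScanner } from './dist/context/scanner.js'; // path is illustrative

const scanner = new ProjectScanner(process.cwd());
const paths = await scanner.scan(); // e.g. ['README.md', 'package.json', 'src/index.ts']
console.log(`${paths.length} files available for context`);
```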
package/dist/context/types.js
ADDED
@@ -0,0 +1 @@
export {};