quackstack 1.0.12 → 1.0.13
This diff shows the publicly available contents of two package versions as published to their respective registries, and is provided for informational purposes only.
- package/dist/cli.cjs +38 -12
- package/dist/commands/search.js +2 -2
- package/dist/lib/ai-provider.js +150 -37
- package/dist/repl.js +79 -12
- package/package.json +1 -1
package/dist/cli.cjs
CHANGED
@@ -6,24 +6,51 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
 Object.defineProperty(exports, "__esModule", { value: true });
 const commander_1 = require("commander");
 const chalk_animation_1 = __importDefault(require("chalk-animation"));
+const chalk_1 = __importDefault(require("chalk"));
 const repl_js_1 = require("./repl.js");
 const context_generator_js_1 = require("./lib/context-generator.js");
 const readme_js_1 = require("./commands/readme.js");
+const ai_provider_js_1 = require("./lib/ai-provider.js");
 const path_1 = __importDefault(require("path"));
 const program = new commander_1.Command();
 const PROJECT_NAME = path_1.default.basename(process.cwd());
 program
-    .name("
+    .name("QuackStack🔥")
     .description("Your cracked unpaid intern for all things codebase!")
-    .version("1.0.
+    .version("1.0.5")
     .option("-r, --reindex", "Force reindex the codebase")
-    .option("-c, --context", "Generate context files for ALL AI coding tools
-    .option("-d, --docs", "Generate CODEBASE.md
+    .option("-c, --context", "Generate context files for ALL AI coding tools")
+    .option("-d, --docs", "Generate CODEBASE.md")
     .option("--readme", "Generate README.md from your codebase")
-    .option("--cursor", "[DEPRECATED] Use --context instead
+    .option("--cursor", "[DEPRECATED] Use --context instead")
     .option("-w, --watch", "Watch mode: auto-update context files on file changes")
+    .option("-p, --provider <provider>", "AI provider: openai, anthropic, gemini, deepseek, mistral")
+    .option("-m, --model <model>", "Specific model to use")
+    .option("--list-models", "List available providers and models")
     .action(async (options) => {
-
+    if (options.listModels) {
+        const client = (0, ai_provider_js_1.getAIClient)();
+        const providers = client.getAvailableProviders();
+        console.log(chalk_1.default.cyan("\nAvailable AI Providers & Models:\n"));
+        if (providers.length === 0) {
+            console.log(chalk_1.default.red("No API keys configured."));
+            process.exit(1);
+        }
+        providers.forEach(provider => {
+            console.log(chalk_1.default.green(`\n${provider.name} (${provider.provider}):`));
+            console.log(chalk_1.default.gray(`  Default: ${provider.defaultModel}`));
+            console.log(chalk_1.default.white("  Available models:"));
+            provider.models.forEach(model => {
+                const isDefault = model === provider.defaultModel;
+                console.log(chalk_1.default.white(`    - ${model}${isDefault ? chalk_1.default.gray(" (default)") : ""}`));
+            });
+        });
+        console.log(chalk_1.default.cyan("\nUsage:"));
+        console.log(chalk_1.default.white("  quackstack --provider anthropic --model claude-sonnet-4-5-20250929"));
+        console.log(chalk_1.default.white("  quackstack -p openai -m gpt-4o\n"));
+        process.exit(0);
+    }
+    const title = chalk_animation_1.default.rainbow("QuackStack\n");
     await new Promise(res => setTimeout(res, 1500));
     title.stop();
     if (options.readme) {
@@ -36,20 +63,19 @@ program
         process.exit(0);
     }
     if (options.cursor) {
-        console.log("
-        console.log("
+        console.log("--cursor is deprecated. Use --context to support all AI tools.\n");
+        console.log("Generating context for AI assistants...\n");
         await (0, context_generator_js_1.generateContextFiles)(PROJECT_NAME);
         await (0, context_generator_js_1.updateGlobalContext)(PROJECT_NAME);
-        console.log("\
-        console.log("💡 Your AI coding assistant will now have codebase context");
+        console.log("\nContext generation complete!");
         process.exit(0);
     }
     if (options.watch) {
-        console.log("
+        console.log("Starting watch mode...\n");
         await (0, context_generator_js_1.generateContextFiles)(PROJECT_NAME);
         (0, context_generator_js_1.watchAndUpdateContext)(PROJECT_NAME);
         await new Promise(() => { });
     }
-    await (0, repl_js_1.startREPL)(options.reindex);
+    await (0, repl_js_1.startREPL)(options.reindex, options.provider, options.model);
 });
 program.parse();
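
For orientation, here is a standalone, runnable sketch of the listing the new --list-models flag prints. It mirrors the provider/name/models/defaultModel shapes used in the handler above; the single entry is illustrative sample data, not the package's full provider table.

// Illustrative entry shaped like getAvailableProviders() output (sample data only).
const providers = [
    {
        provider: "openai",
        name: "OpenAI",
        models: ["gpt-4.1", "gpt-4.1-mini"],
        defaultModel: "gpt-4.1-mini",
    },
];
for (const p of providers) {
    console.log(`${p.name} (${p.provider}):`);
    console.log(`  Default: ${p.defaultModel}`);
    for (const m of p.models) {
        console.log(`  - ${m}${m === p.defaultModel ? " (default)" : ""}`);
    }
}
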
package/dist/commands/search.js
CHANGED
@@ -2,7 +2,7 @@
 import { client } from "../lib/database.js";
 import { localEmbeddings } from "../lib/local-embeddings.js";
 import { getAIClient } from "../lib/ai-provider.js";
-export async function search(query, projectName) {
+export async function search(query, projectName, provider, model) {
     const snippets = await client.codeSnippet.findMany({
         where: { projectName },
     });
@@ -28,7 +28,7 @@ export async function search(query, projectName) {
     const context = uniqueResults
         .map((r, i) => `[${i + 1}] ${r.filePath}${r.functionName ? ` (${r.functionName})` : ""}\n${r.content}`)
         .join("\n\n---\n\n");
-    const aiClient = getAIClient();
+    const aiClient = getAIClient(provider, model);
     const answer = await aiClient.generateAnswer(query, context);
     return { answer, sources: uniqueResults };
 }
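
A minimal usage sketch of the widened search() signature. The import path below is an assumption (whatever resolves to dist/commands/search.js in your setup), and the snippet presumes the project has already been indexed; provider and model are optional overrides.

// Hypothetical import path; adjust to your installation.
import { search } from "quackstack/dist/commands/search.js";

const { answer, sources } = await search(
    "where are embeddings generated?", // query
    "my-project",                      // projectName
    "anthropic",                       // provider override (optional)
    "claude-sonnet-4"                  // model override (optional)
);
console.log(answer);
console.log(`${sources.length} source snippet(s)`);
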
package/dist/lib/ai-provider.js
CHANGED
@@ -5,54 +5,153 @@ import dotenv from "dotenv";
 dotenv.config({ quiet: true });
 export class AIClient {
     provider;
+    model;
     openai;
     anthropic;
     gemini;
     deepseek;
     mistral;
-    constructor() {
-        const config = this.detectProvider();
+    constructor(providerOverride, modelOverride) {
+        const config = this.detectProvider(providerOverride);
         this.provider = config.provider;
+        this.model = modelOverride || config.model || this.getDefaultModel(config.provider);
+        const availableProvider = this.getAvailableProviders().find(p => p.provider === this.provider);
+        if (availableProvider && !availableProvider.models.includes(this.model)) {
+            throw new Error(`Model '${this.model}' not available for ${this.provider}.\n` +
+                `Available models: ${availableProvider.models.join(', ')}`);
+        }
         this.initializeClient(config);
     }
-    detectProvider() {
-
+    detectProvider(override) {
+        const availableProviders = this.getAvailableProviders();
+        if (availableProviders.length === 0) {
+            throw new Error("No AI API key found. Set one of:\n" +
+                "  QUACKSTACK_OPENAI_KEY\n" +
+                "  QUACKSTACK_ANTHROPIC_KEY\n" +
+                "  QUACKSTACK_GEMINI_KEY\n" +
+                "  QUACKSTACK_DEEPSEEK_KEY\n" +
+                "  QUACKSTACK_MISTRAL_KEY");
+        }
+        if (override) {
+            const providerConfig = availableProviders.find(p => p.provider === override);
+            if (!providerConfig) {
+                throw new Error(`Provider '${override}' not available. Set QUACKSTACK_${override.toUpperCase()}_KEY.\n` +
+                    `Available: ${availableProviders.map(p => p.provider).join(', ')}`);
+            }
             return {
-            provider:
-            apiKey:
+                provider: override,
+                apiKey: this.getApiKey(override),
+                model: providerConfig.defaultModel
             };
         }
+        const first = availableProviders[0];
+        return {
+            provider: first.provider,
+            apiKey: this.getApiKey(first.provider),
+            model: first.defaultModel
+        };
+    }
+    getApiKey(provider) {
+        const keyMap = {
+            openai: process.env.QUACKSTACK_OPENAI_KEY || "",
+            anthropic: process.env.QUACKSTACK_ANTHROPIC_KEY || "",
+            gemini: process.env.QUACKSTACK_GEMINI_KEY || "",
+            deepseek: process.env.QUACKSTACK_DEEPSEEK_KEY || "",
+            mistral: process.env.QUACKSTACK_MISTRAL_KEY || "",
+        };
+        return keyMap[provider] || undefined;
+    }
+    getAvailableProviders() {
+        const providers = [];
+        if (process.env.QUACKSTACK_OPENAI_KEY) {
+            providers.push({
+                provider: "openai",
+                name: "OpenAI",
+                models: ["gpt-4.1",
+                    "gpt-4.1-mini",
+                    "gpt-4.1-nano",
+                    "gpt-4-turbo",
+                    "gpt-3.5-turbo",
+                    "gpt-3.5-turbo-0125"],
+                defaultModel: "gpt-4o-mini"
+            });
+        }
         if (process.env.QUACKSTACK_ANTHROPIC_KEY) {
-
+            providers.push({
                 provider: "anthropic",
-
-
+                name: "Anthropic",
+                models: ["claude-opus-4",
+                    "claude-sonnet-4",
+                    "claude-3.7-sonnet",
+                    "claude-3.5-haiku"
+                ],
+                defaultModel: "claude-sonnet-4"
+            });
         }
         if (process.env.QUACKSTACK_GEMINI_KEY) {
-
+            providers.push({
                 provider: "gemini",
-
-
+                name: "Gemini",
+                models: ["gemini-3",
+                    "gemini-2.5-pro",
+                    "gemini-2.5-flash",
+                    "gemini-2.5-flash-lite",
+                    "gemini-2.5-flash-image",
+                    "gemini-2.0-spark",
+                    "gemini-2.0-flash-lite"
+                ],
+                defaultModel: "gemini-2.5-flash"
+            });
         }
         if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
-
+            providers.push({
                 provider: "deepseek",
-
-
+                name: "DeepSeek",
+                models: ["deepseek-r1",
+                    "deepseek-r1-zero",
+                    "deepseek-r1-70b",
+                    "deepseek-r1-32b",
+                    "deepseek-r1-14b",
+                    "deepseek-r1-8b",
+                    "deepseek-r1-7b",
+                    "deepseek-r1-1.5b",
+                    "deepseek-vl2",
+                    "deepseek-vl2-small",
+                    "deepseek-vl2-tiny"
+                ],
+                defaultModel: "deepseek-chat"
+            });
         }
         if (process.env.QUACKSTACK_MISTRAL_KEY) {
-
+            providers.push({
                 provider: "mistral",
-
-
+                name: "Mistral",
+                models: [
+                    "mistral-large",
+                    "mistral-medium",
+                    "mistral-small",
+                    "mistral-7b",
+                    "mixtral-8x7b",
+                    "mixtral-8x22b",
+                    "codestral",
+                    "codestral-mamba",
+                    "mathstral",
+                    "mistral-nemo-12b"
+                ],
+                defaultModel: "mistral-large-latest"
+            });
         }
-
-
-
-
-        "
-        "
-        "
+        return providers;
+    }
+    getDefaultModel(provider) {
+        const defaults = {
+            openai: "gpt-4o-mini",
+            anthropic: "claude-3-5-sonnet-20241022",
+            gemini: "gemini-1.5-flash",
+            deepseek: "deepseek-chat",
+            mistral: "mistral-large-latest",
+        };
+        return defaults[provider];
     }
     initializeClient(config) {
         switch (config.provider) {
@@ -110,7 +209,7 @@ export class AIClient {
         if (!this.openai)
             throw new Error("OpenAI client not initialized");
         const response = await this.openai.chat.completions.create({
-            model:
+            model: this.model,
             messages: [
                 { role: "system", content: systemPrompt },
                 { role: "user", content: userPrompt },
@@ -123,7 +222,7 @@ export class AIClient {
         if (!this.anthropic)
             throw new Error("Anthropic client not initialized");
         const response = await this.anthropic.messages.create({
-            model:
+            model: this.model,
             max_tokens: 2048,
             system: systemPrompt,
             messages: [{ role: "user", content: userPrompt }],
@@ -136,7 +235,7 @@ export class AIClient {
     async generateGemini(systemPrompt, userPrompt) {
         if (!this.gemini)
             throw new Error("Gemini client not initialized");
-        const model = this.gemini.getGenerativeModel({ model:
+        const model = this.gemini.getGenerativeModel({ model: this.model });
         const result = await model.generateContent(`${systemPrompt}\n\n${userPrompt}`);
         return result.response.text();
     }
@@ -144,7 +243,7 @@ export class AIClient {
         if (!this.deepseek)
             throw new Error("DeepSeek client not initialized");
         const response = await this.deepseek.chat.completions.create({
-            model:
+            model: this.model,
             messages: [
                 { role: "system", content: systemPrompt },
                 { role: "user", content: userPrompt },
@@ -157,7 +256,7 @@ export class AIClient {
         if (!this.mistral)
             throw new Error("Mistral client not initialized");
         const response = await this.mistral.chat.completions.create({
-            model:
+            model: this.model,
             messages: [
                 { role: "system", content: systemPrompt },
                 { role: "user", content: userPrompt },
@@ -168,22 +267,36 @@ export class AIClient {
     }
     getProviderName() {
         const names = {
-            openai: "OpenAI
-            anthropic: "Anthropic
-            gemini: "
+            openai: "OpenAI",
+            anthropic: "Anthropic",
+            gemini: "Gemini",
             deepseek: "DeepSeek",
-            mistral: "Mistral
+            mistral: "Mistral",
         };
         return names[this.provider];
     }
     getProvider() {
         return this.provider;
     }
+    getModel() {
+        return this.model;
+    }
+    setModel(model) {
+        const availableProvider = this.getAvailableProviders().find(p => p.provider === this.provider);
+        if (availableProvider && !availableProvider.models.includes(model)) {
+            throw new Error(`Model '${model}' not available for ${this.provider}.\n` +
+                `Available models: ${availableProvider.models.join(', ')}`);
+        }
+        this.model = model;
+    }
 }
 let aiClientInstance = null;
-export function getAIClient() {
-    if (!aiClientInstance) {
-        aiClientInstance = new AIClient();
+export function getAIClient(provider, model) {
+    if (!aiClientInstance || provider || model) {
+        aiClientInstance = new AIClient(provider, model);
     }
     return aiClientInstance;
 }
+export function resetAIClient() {
+    aiClientInstance = null;
+}
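
Note the changed singleton semantics: getAIClient() caches one AIClient, but passing any provider or model argument rebuilds it, and resetAIClient() drops the cache entirely. A minimal sketch, assuming the hypothetical import path below:

// Hypothetical import path; adjust to your installation.
import { getAIClient, resetAIClient } from "quackstack/dist/lib/ai-provider.js";

process.env.QUACKSTACK_OPENAI_KEY = "sk-..."; // any one QUACKSTACK_*_KEY enables its provider

const a = getAIClient("openai", "gpt-4.1");       // overrides construct a fresh instance
const b = getAIClient(undefined, "gpt-4.1-mini"); // any argument replaces the cached one
console.log(a === b, b.getModel());               // false "gpt-4.1-mini"

resetAIClient(); // next zero-argument call re-detects provider and model from the environment

One quirk worth flagging: several shipped defaultModel values (for example OpenAI's "gpt-4o-mini") do not appear in their own models arrays, so a bare getAIClient() can fail the constructor's availability check; the sketch therefore passes a model explicitly.
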
package/dist/repl.js
CHANGED
@@ -5,16 +5,20 @@ import { ingest } from "./commands/ingest.js";
 import { client } from "./lib/database.js";
 import path from "path";
 import { detectFileChanges, formatChangeMessage } from "./lib/file-change-detector.js";
+import { getAIClient, resetAIClient } from "./lib/ai-provider.js";
 const PROJECT_NAME = path.basename(process.cwd());
-export async function startREPL(forceReindex = false) {
-
+export async function startREPL(forceReindex = false, provider, model) {
+    const aiClient = getAIClient(provider, model);
+    console.log(chalk.cyan(`\nUsing: ${aiClient.getProviderName()} - ${aiClient.getModel()}`));
+    console.log(chalk.gray("Run 'quackstack --list-models' to see all options"));
+    console.log(chalk.cyan("\nPress Ctrl+C to exit\n"));
     if (!forceReindex) {
         const changes = await detectFileChanges(process.cwd(), PROJECT_NAME);
         if (changes && changes.totalChanges > 0) {
-            console.log(chalk.yellow(`\
+            console.log(chalk.yellow(`\nDetected ${changes.totalChanges} file change${changes.totalChanges > 1 ? 's' : ''} since last index:`));
             console.log(chalk.yellow(`  ${formatChangeMessage(changes)}`));
             console.log(chalk.yellow(`  Run 'quack --reindex' for best results.\n`));
-            const shouldReindex = await promptUser(chalk.yellow("
+            const shouldReindex = await promptUser(chalk.yellow("Reindex now? (y/n) > "));
             if (shouldReindex.toLowerCase() === 'y') {
                 forceReindex = true;
             }
@@ -25,33 +29,38 @@ export async function startREPL(forceReindex = false) {
     });
     if (existingCount === 0 || forceReindex) {
         if (forceReindex) {
-            console.log(chalk.gray("
+            console.log(chalk.gray("Clearing old index..."));
             await client.codeSnippet.deleteMany({
                 where: { projectName: PROJECT_NAME },
             });
         }
-        console.log(chalk.gray("
+        console.log(chalk.gray("Indexing your codebase..."));
         await ingest(process.cwd(), PROJECT_NAME, true);
-        console.log(chalk.green("
+        console.log(chalk.green("Indexing complete"));
     }
     const rl = readline.createInterface({
         input: process.stdin,
         output: process.stdout,
-        prompt: chalk.yellow("
+        prompt: chalk.yellow("quack > "),
     });
     rl.prompt();
     rl.on("line", async (line) => {
         const query = line.trim();
+        if (query.startsWith("/")) {
+            await handleCommand(query, rl);
+            rl.prompt();
+            return;
+        }
         if (!query) {
             rl.prompt();
             return;
         }
         try {
-            const { answer, sources } = await search(query, PROJECT_NAME);
+            const { answer, sources } = await search(query, PROJECT_NAME, provider, model);
             console.log(chalk.white(`\n${answer}\n`));
-            const showDetails = await promptUser(chalk.cyan("
+            const showDetails = await promptUser(chalk.cyan("Want more details? (y/n) > "));
             if (showDetails.toLowerCase() === "y") {
-                console.log(chalk.blue("\
+                console.log(chalk.blue("\nRelevant Code:\n"));
                 sources.forEach((src, i) => {
                     console.log(chalk.gray(`[${i + 1}] ${src.filePath} (relevance: ${(src.score * 100).toFixed(1)}%)`));
                     console.log(chalk.white(src.content));
@@ -65,10 +74,68 @@ export async function startREPL(forceReindex = false) {
         rl.prompt();
     });
     rl.on("close", () => {
-        console.log(chalk.gray("\
+        console.log(chalk.gray("\nBye"));
         process.exit(0);
     });
 }
+async function handleCommand(command, rl) {
+    const [cmd, ...args] = command.slice(1).split(" ");
+    switch (cmd) {
+        case "model":
+        case "m":
+            if (args.length === 0) {
+                const client = getAIClient();
+                console.log(chalk.cyan(`\nCurrent: ${client.getProviderName()} - ${client.getModel()}\n`));
+            }
+            else {
+                const [newModel] = args;
+                try {
+                    resetAIClient();
+                    const client = getAIClient(undefined, newModel);
+                    console.log(chalk.green(`\nSwitched to: ${client.getProviderName()} - ${client.getModel()}\n`));
+                }
+                catch (error) {
+                    console.log(chalk.red(`\nError: ${error.message}\n`));
+                }
+            }
+            break;
+        case "provider":
+        case "p":
+            if (args.length === 0) {
+                const client = getAIClient();
+                const providers = client.getAvailableProviders();
+                console.log(chalk.cyan("\nAvailable Providers:\n"));
+                providers.forEach(p => {
+                    const current = p.provider === client.getProvider() ? chalk.green(" (current)") : "";
+                    console.log(chalk.white(`  ${p.provider}: ${p.name}${current}`));
+                });
+                console.log();
+            }
+            else {
+                const [newProvider] = args;
+                try {
+                    resetAIClient();
+                    const client = getAIClient(newProvider);
+                    console.log(chalk.green(`\nSwitched to: ${client.getProviderName()} - ${client.getModel()}\n`));
+                }
+                catch (error) {
+                    console.log(chalk.red(`\nError: ${error.message}\n`));
+                }
+            }
+            break;
+        case "help":
+        case "h":
+            console.log(chalk.cyan("\nAvailable Commands:\n"));
+            console.log(chalk.white("  /model, /m [model]      Show or change model"));
+            console.log(chalk.white("  /provider, /p [name]    Show or change provider"));
+            console.log(chalk.white("  /help, /h               Show this help"));
+            console.log();
+            break;
+        default:
+            console.log(chalk.red(`\nUnknown command: ${cmd}\n`));
+            console.log(chalk.gray("Type /help for available commands\n"));
+    }
+}
 function promptUser(question) {
     const rl = readline.createInterface({
         input: process.stdin,