quackstack 1.0.4 → 1.0.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/dist/cli.cjs CHANGED
@@ -7,33 +7,41 @@ Object.defineProperty(exports, "__esModule", { value: true });
7
7
  const commander_1 = require("commander");
8
8
  const chalk_animation_1 = __importDefault(require("chalk-animation"));
9
9
  const repl_js_1 = require("./repl.js");
10
- const cursor_context_js_1 = require("./lib/cursor-context.js");
10
+ const context_generator_js_1 = require("./lib/context-generator.js");
11
11
  const path_1 = __importDefault(require("path"));
12
12
  const program = new commander_1.Command();
13
13
  const PROJECT_NAME = path_1.default.basename(process.cwd());
14
14
  program
15
15
  .name("quackstack")
16
16
  .description("Your cracked unpaid intern for all things codebase!")
17
- .version("1.0.0")
17
+ .version("1.0.4")
18
18
  .option("-r, --reindex", "Force reindex the codebase")
19
- .option("-c, --cursor", "Generate .cursorrules file for Cursor AI")
20
- .option("-w, --watch", "Watch mode: auto-update Cursor context on file changes")
19
+ .option("-c, --context", "Generate context files for ALL AI coding tools (Cursor, Windsurf, Cline, Continue, Aider)")
20
+ .option("-d, --docs", "Generate CODEBASE.md - universal documentation for any IDE/editor")
21
+ .option("--cursor", "[DEPRECATED] Use --context instead. Generates .cursorrules only")
22
+ .option("-w, --watch", "Watch mode: auto-update context files on file changes")
21
23
  .action(async (options) => {
22
24
  const title = chalk_animation_1.default.rainbow("Welcome to QuackStack! 🐄\n");
23
25
  await new Promise(res => setTimeout(res, 1500));
24
26
  title.stop();
27
+ if (options.context) {
28
+ await (0, context_generator_js_1.generateContextFiles)(PROJECT_NAME);
29
+ await (0, context_generator_js_1.updateGlobalContext)(PROJECT_NAME);
30
+ process.exit(0);
31
+ }
25
32
  if (options.cursor) {
26
- console.log("🔍 Generating Cursor context...\n");
27
- await (0, cursor_context_js_1.generateCursorContext)(PROJECT_NAME);
28
- await (0, cursor_context_js_1.updateCursorGlobalContext)(PROJECT_NAME);
29
- console.log("\n✅ Cursor integration complete!");
30
- console.log("💡 Cursor will now have context about your codebase");
33
+ console.log("⚠️ --cursor is deprecated. Use --context to support all AI tools.\n");
34
+ console.log("🔍 Generating context for AI assistants...\n");
35
+ await (0, context_generator_js_1.generateContextFiles)(PROJECT_NAME);
36
+ await (0, context_generator_js_1.updateGlobalContext)(PROJECT_NAME);
37
+ console.log("\n✅ Context generation complete!");
38
+ console.log("💡 Your AI coding assistant will now have codebase context");
31
39
  process.exit(0);
32
40
  }
33
41
  if (options.watch) {
34
- console.log("👀 Starting watch mode for Cursor context...\n");
35
- await (0, cursor_context_js_1.generateCursorContext)(PROJECT_NAME);
36
- (0, cursor_context_js_1.watchAndUpdateCursor)(PROJECT_NAME);
42
+ console.log("👀 Starting watch mode...\n");
43
+ await (0, context_generator_js_1.generateContextFiles)(PROJECT_NAME);
44
+ (0, context_generator_js_1.watchAndUpdateContext)(PROJECT_NAME);
37
45
  await new Promise(() => { });
38
46
  }
39
47
  await (0, repl_js_1.startREPL)(options.reindex);
@@ -2,45 +2,55 @@ import fs from "fs";
2
2
  import path from "path";
3
3
  import { scanDir } from "../lib/scanner.js";
4
4
  import { chunkCode } from "../lib/chunker.js";
5
- import { aiClient } from "../lib/ai-provider.js";
6
5
  import { saveToDB } from "../lib/database.js";
6
+ import { localEmbeddings } from "../lib/local-embeddings.js";
7
7
  export async function ingest(rootDir, projectName, silent = false) {
8
8
  if (!silent)
9
9
  console.log("Starting ingestion...");
10
10
  const files = await scanDir(rootDir);
11
11
  if (!silent)
12
12
  console.log(`Found ${files.length} files to process`);
13
+ const allChunks = [];
14
+ for (const filePath of files) {
15
+ try {
16
+ const content = fs.readFileSync(filePath, "utf-8");
17
+ const chunks = chunkCode(content, filePath);
18
+ chunks.forEach(chunk => {
19
+ allChunks.push({ content: chunk.content, filePath, chunk });
20
+ });
21
+ }
22
+ catch (error) {
23
+ console.error(`Error reading ${filePath}:`, error);
24
+ }
25
+ }
26
+ if (!silent)
27
+ console.log(`Computing embeddings for ${allChunks.length} chunks...`);
28
+ const allContent = allChunks.map(c => c.content);
29
+ localEmbeddings.addDocuments(allContent);
30
+ if (!silent)
31
+ console.log(`Saving to database...`);
32
+ const BATCH_SIZE = 50;
13
33
  let processedCount = 0;
14
- const BATCH_SIZE = 10;
15
- for (let i = 0; i < files.length; i += BATCH_SIZE) {
16
- const batch = files.slice(i, i + BATCH_SIZE);
17
- await Promise.all(batch.map(async (filePath) => {
18
- try {
19
- const content = fs.readFileSync(filePath, "utf-8");
20
- const chunks = chunkCode(content, filePath);
21
- for (const chunk of chunks) {
22
- const embedding = await aiClient.getEmbeddings(chunk.content);
23
- await saveToDB({
24
- content: chunk.content,
25
- embedding,
26
- filePath,
27
- projectName,
28
- language: path.extname(filePath),
29
- functionName: chunk.functionName,
30
- lineStart: chunk.lineStart,
31
- lineEnd: chunk.lineEnd,
32
- });
33
- }
34
- processedCount++;
35
- if (!silent && processedCount % 10 === 0) {
36
- console.log(`Processed ${processedCount}/${files.length} files...`);
37
- }
38
- }
39
- catch (error) {
40
- console.error(`Error processing ${filePath}:`, error);
41
- }
34
+ for (let i = 0; i < allChunks.length; i += BATCH_SIZE) {
35
+ const batch = allChunks.slice(i, i + BATCH_SIZE);
36
+ await Promise.all(batch.map(async ({ content, filePath, chunk }) => {
37
+ const embedding = localEmbeddings.getVector(content);
38
+ await saveToDB({
39
+ content,
40
+ embedding,
41
+ filePath,
42
+ projectName,
43
+ language: path.extname(filePath),
44
+ functionName: chunk.functionName,
45
+ lineStart: chunk.lineStart,
46
+ lineEnd: chunk.lineEnd,
47
+ });
42
48
  }));
49
+ processedCount += batch.length;
50
+ if (!silent && processedCount % 100 === 0) {
51
+ console.log(`Saved ${processedCount}/${allChunks.length} chunks...`);
52
+ }
43
53
  }
44
54
  if (!silent)
45
- console.log(`Done! Processed ${processedCount} files.`);
55
+ console.log(`Done! Processed ${processedCount} chunks from ${files.length} files.`);
46
56
  }
@@ -1,39 +1,33 @@
1
1
  #!/usr/bin/env node
2
- import { aiClient } from "../lib/ai-provider.js";
3
2
  import { client } from "../lib/database.js";
4
- function cosineSim(a, b) {
5
- let dot = 0, normA = 0, normB = 0;
6
- for (let i = 0; i < a.length; i++) {
7
- dot += a[i] * b[i];
8
- normA += a[i] * a[i];
9
- normB += b[i] * b[i];
10
- }
11
- return dot / (Math.sqrt(normA) * Math.sqrt(normB));
12
- }
3
+ import { localEmbeddings } from "../lib/local-embeddings.js";
4
+ import { aiClient } from "../lib/ai-provider.js";
13
5
  export async function search(query, projectName) {
14
- const queryEmbedding = await aiClient.getEmbeddings(query);
15
6
  const snippets = await client.codeSnippet.findMany({
16
7
  where: { projectName },
17
8
  });
18
- const scored = snippets.map(snippet => ({
9
+ const allContent = snippets.map(s => s.content);
10
+ localEmbeddings.addDocuments(allContent);
11
+ const queryVector = localEmbeddings.getVector(query);
12
+ const ranked = snippets
13
+ .map(snippet => ({
19
14
  id: snippet.id,
20
15
  content: snippet.content,
21
16
  filePath: snippet.filePath,
22
17
  functionName: snippet.functionName,
23
- score: cosineSim(queryEmbedding, snippet.embedding),
24
- }));
25
- scored.sort((a, b) => b.score - a.score);
18
+ score: localEmbeddings.cosineSimilarity(queryVector, snippet.embedding),
19
+ }))
20
+ .sort((a, b) => b.score - a.score);
26
21
  const seenFiles = new Set();
27
- const ranked = scored.filter(item => {
28
- if (seenFiles.has(item.filePath)) {
22
+ const uniqueResults = ranked.filter(item => {
23
+ if (seenFiles.has(item.filePath))
29
24
  return false;
30
- }
31
25
  seenFiles.add(item.filePath);
32
26
  return true;
33
27
  }).slice(0, 5);
34
- const context = ranked
28
+ const context = uniqueResults
35
29
  .map((r, i) => `[${i + 1}] ${r.filePath}${r.functionName ? ` (${r.functionName})` : ""}\n${r.content}`)
36
30
  .join("\n\n---\n\n");
37
31
  const answer = await aiClient.generateAnswer(query, context);
38
- return { answer, sources: ranked };
32
+ return { answer, sources: uniqueResults };
39
33
  }
@@ -11,126 +11,160 @@ export class AIClient {
11
11
  deepseek;
12
12
  mistral;
13
13
  constructor() {
14
+ const config = this.detectProvider();
15
+ this.provider = config.provider;
16
+ this.initializeClient(config);
17
+ }
18
+ detectProvider() {
14
19
  if (process.env.QUACKSTACK_OPENAI_KEY) {
15
- this.provider = "openai";
16
- this.openai = new OpenAI({
17
- apiKey: process.env.QUACKSTACK_OPENAI_KEY
18
- });
20
+ return {
21
+ provider: "openai",
22
+ apiKey: process.env.QUACKSTACK_OPENAI_KEY,
23
+ };
19
24
  }
20
- else if (process.env.QUACKSTACK_ANTHROPIC_KEY) {
21
- this.provider = "anthropic";
22
- this.anthropic = new Anthropic({
23
- apiKey: process.env.QUACKSTACK_ANTHROPIC_KEY
24
- });
25
+ if (process.env.QUACKSTACK_ANTHROPIC_KEY) {
26
+ return {
27
+ provider: "anthropic",
28
+ apiKey: process.env.QUACKSTACK_ANTHROPIC_KEY,
29
+ };
25
30
  }
26
- else if (process.env.QUACKSTACK_GEMINI_KEY) {
27
- this.provider = "gemini";
28
- this.gemini = new GoogleGenerativeAI(process.env.QUACKSTACK_GEMINI_KEY);
31
+ if (process.env.QUACKSTACK_GEMINI_KEY) {
32
+ return {
33
+ provider: "gemini",
34
+ apiKey: process.env.QUACKSTACK_GEMINI_KEY,
35
+ };
29
36
  }
30
- else if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
31
- this.provider = "deepseek";
32
- this.deepseek = new OpenAI({
37
+ if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
38
+ return {
39
+ provider: "deepseek",
33
40
  apiKey: process.env.QUACKSTACK_DEEPSEEK_KEY,
34
- baseURL: "https://api.deepseek.com/v1"
35
- });
41
+ };
36
42
  }
37
- else if (process.env.QUACKSTACK_MISTRAL_KEY) {
38
- this.provider = "mistral";
39
- this.mistral = new OpenAI({
43
+ if (process.env.QUACKSTACK_MISTRAL_KEY) {
44
+ return {
45
+ provider: "mistral",
40
46
  apiKey: process.env.QUACKSTACK_MISTRAL_KEY,
41
- baseURL: "https://api.mistral.ai/v1"
42
- });
43
- }
44
- else {
45
- throw new Error("No AI API key found. Please set one of:\n" +
46
- " QUACKSTACK_OPENAI_KEY (GPT-4)\n" +
47
- " QUACKSTACK_ANTHROPIC_KEY (Claude)\n" +
48
- " QUACKSTACK_GEMINI_KEY (Gemini)\n" +
49
- " QUACKSTACK_DEEPSEEK_KEY (DeepSeek)\n" +
50
- " QUACKSTACK_MISTRAL_KEY (Mistral)");
47
+ };
51
48
  }
49
+ throw new Error("No AI API key found. Please set one of:\n" +
50
+ " QUACKSTACK_OPENAI_KEY - OpenAI GPT-4\n" +
51
+ " QUACKSTACK_ANTHROPIC_KEY - Anthropic Claude\n" +
52
+ " QUACKSTACK_GEMINI_KEY - Google Gemini (free tier available)\n" +
53
+ " QUACKSTACK_DEEPSEEK_KEY - DeepSeek (cheapest option)\n" +
54
+ " QUACKSTACK_MISTRAL_KEY - Mistral AI\n\n" +
55
+ "Get your API key from the respective provider's website.");
52
56
  }
53
- async getEmbeddings(text) {
54
- const embeddingKey = process.env.QUACKSTACK_OPENAI_KEY ||
55
- process.env.QUACKSTACK_EMBEDDING_KEY;
56
- if (embeddingKey) {
57
- const client = new OpenAI({ apiKey: embeddingKey });
58
- const response = await client.embeddings.create({
59
- model: "text-embedding-3-large",
60
- input: text
61
- });
62
- return response.data[0].embedding;
63
- }
64
- if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
65
- const client = new OpenAI({
66
- apiKey: process.env.QUACKSTACK_DEEPSEEK_KEY,
67
- baseURL: "https://api.deepseek.com/v1"
68
- });
69
- const response = await client.embeddings.create({
70
- model: "deepseek-chat",
71
- input: text
72
- });
73
- return response.data[0].embedding;
57
+ initializeClient(config) {
58
+ switch (config.provider) {
59
+ case "openai":
60
+ this.openai = new OpenAI({ apiKey: config.apiKey });
61
+ break;
62
+ case "anthropic":
63
+ this.anthropic = new Anthropic({ apiKey: config.apiKey });
64
+ break;
65
+ case "gemini":
66
+ this.gemini = new GoogleGenerativeAI(config.apiKey);
67
+ break;
68
+ case "deepseek":
69
+ this.deepseek = new OpenAI({
70
+ apiKey: config.apiKey,
71
+ baseURL: "https://api.deepseek.com/v1",
72
+ });
73
+ break;
74
+ case "mistral":
75
+ this.mistral = new OpenAI({
76
+ apiKey: config.apiKey,
77
+ baseURL: "https://api.mistral.ai/v1",
78
+ });
79
+ break;
74
80
  }
75
- throw new Error("Embeddings require an API key. Set one of:\n" +
76
- " QUACKSTACK_OPENAI_KEY\n" +
77
- " QUACKSTACK_DEEPSEEK_KEY\n" +
78
- " QUACKSTACK_EMBEDDING_KEY");
79
81
  }
80
82
  async generateAnswer(query, context) {
81
- const systemPrompt = "You are a helpful coding assistant. Answer questions about the codebase using the provided code snippets. Be concise and reference specific files when relevant.";
83
+ const systemPrompt = "You are a helpful coding assistant. Answer questions about the codebase using the provided code snippets. " +
84
+ "Be concise and reference specific files when relevant. Format your responses in markdown for clarity.";
82
85
  const userPrompt = `Code context:\n\n${context}\n\nQuestion: ${query}`;
83
- if (this.provider === "openai" && this.openai) {
84
- const response = await this.openai.chat.completions.create({
85
- model: "gpt-4o-mini",
86
- messages: [
87
- { role: "system", content: systemPrompt },
88
- { role: "user", content: userPrompt }
89
- ],
90
- temperature: 0.3,
91
- });
92
- return response.choices[0].message.content || "No response generated.";
93
- }
94
- if (this.provider === "anthropic" && this.anthropic) {
95
- const response = await this.anthropic.messages.create({
96
- model: "claude-3-5-sonnet-20241022",
97
- max_tokens: 2048,
98
- system: systemPrompt,
99
- messages: [
100
- { role: "user", content: userPrompt }
101
- ]
102
- });
103
- const textContent = response.content.find(c => c.type === "text");
104
- return textContent && textContent.type === "text" ? textContent.text : "No response generated.";
105
- }
106
- if (this.provider === "gemini" && this.gemini) {
107
- const model = this.gemini.getGenerativeModel({ model: "gemini-1.5-flash" });
108
- const result = await model.generateContent(`${systemPrompt}\n\n${userPrompt}`);
109
- return result.response.text();
86
+ try {
87
+ switch (this.provider) {
88
+ case "openai":
89
+ return await this.generateOpenAI(systemPrompt, userPrompt);
90
+ case "anthropic":
91
+ return await this.generateAnthropic(systemPrompt, userPrompt);
92
+ case "gemini":
93
+ return await this.generateGemini(systemPrompt, userPrompt);
94
+ case "deepseek":
95
+ return await this.generateDeepSeek(systemPrompt, userPrompt);
96
+ case "mistral":
97
+ return await this.generateMistral(systemPrompt, userPrompt);
98
+ default:
99
+ throw new Error(`Unsupported provider: ${this.provider}`);
100
+ }
110
101
  }
111
- if (this.provider === "deepseek" && this.deepseek) {
112
- const response = await this.deepseek.chat.completions.create({
113
- model: "deepseek-chat",
114
- messages: [
115
- { role: "system", content: systemPrompt },
116
- { role: "user", content: userPrompt }
117
- ],
118
- temperature: 0.3,
119
- });
120
- return response.choices[0].message.content || "No response generated.";
102
+ catch (error) {
103
+ if (error instanceof Error) {
104
+ throw new Error(`AI generation failed: ${error.message}`);
105
+ }
106
+ throw new Error("AI generation failed with unknown error");
121
107
  }
122
- if (this.provider === "mistral" && this.mistral) {
123
- const response = await this.mistral.chat.completions.create({
124
- model: "mistral-large-latest",
125
- messages: [
126
- { role: "system", content: systemPrompt },
127
- { role: "user", content: userPrompt }
128
- ],
129
- temperature: 0.3,
130
- });
131
- return response.choices[0].message.content || "No response generated.";
132
- }
133
- throw new Error("No AI provider configured");
108
+ }
109
+ async generateOpenAI(systemPrompt, userPrompt) {
110
+ if (!this.openai)
111
+ throw new Error("OpenAI client not initialized");
112
+ const response = await this.openai.chat.completions.create({
113
+ model: "gpt-4o-mini",
114
+ messages: [
115
+ { role: "system", content: systemPrompt },
116
+ { role: "user", content: userPrompt },
117
+ ],
118
+ temperature: 0.3,
119
+ });
120
+ return response.choices[0].message.content || "No response generated.";
121
+ }
122
+ async generateAnthropic(systemPrompt, userPrompt) {
123
+ if (!this.anthropic)
124
+ throw new Error("Anthropic client not initialized");
125
+ const response = await this.anthropic.messages.create({
126
+ model: "claude-3-5-sonnet-20241022",
127
+ max_tokens: 2048,
128
+ system: systemPrompt,
129
+ messages: [{ role: "user", content: userPrompt }],
130
+ });
131
+ const textContent = response.content.find((c) => c.type === "text");
132
+ return textContent && textContent.type === "text"
133
+ ? textContent.text
134
+ : "No response generated.";
135
+ }
136
+ async generateGemini(systemPrompt, userPrompt) {
137
+ if (!this.gemini)
138
+ throw new Error("Gemini client not initialized");
139
+ const model = this.gemini.getGenerativeModel({ model: "gemini-1.5-flash" });
140
+ const result = await model.generateContent(`${systemPrompt}\n\n${userPrompt}`);
141
+ return result.response.text();
142
+ }
143
+ async generateDeepSeek(systemPrompt, userPrompt) {
144
+ if (!this.deepseek)
145
+ throw new Error("DeepSeek client not initialized");
146
+ const response = await this.deepseek.chat.completions.create({
147
+ model: "deepseek-chat",
148
+ messages: [
149
+ { role: "system", content: systemPrompt },
150
+ { role: "user", content: userPrompt },
151
+ ],
152
+ temperature: 0.3,
153
+ });
154
+ return response.choices[0].message.content || "No response generated.";
155
+ }
156
+ async generateMistral(systemPrompt, userPrompt) {
157
+ if (!this.mistral)
158
+ throw new Error("Mistral client not initialized");
159
+ const response = await this.mistral.chat.completions.create({
160
+ model: "mistral-large-latest",
161
+ messages: [
162
+ { role: "system", content: systemPrompt },
163
+ { role: "user", content: userPrompt },
164
+ ],
165
+ temperature: 0.3,
166
+ });
167
+ return response.choices[0].message.content || "No response generated.";
134
168
  }
135
169
  getProviderName() {
136
170
  const names = {
@@ -138,9 +172,12 @@ export class AIClient {
138
172
  anthropic: "Anthropic Claude",
139
173
  gemini: "Google Gemini",
140
174
  deepseek: "DeepSeek",
141
- mistral: "Mistral AI"
175
+ mistral: "Mistral AI",
142
176
  };
143
177
  return names[this.provider];
144
178
  }
179
+ getProvider() {
180
+ return this.provider;
181
+ }
145
182
  }
146
183
  export const aiClient = new AIClient();
@@ -1,6 +1,6 @@
1
1
  import OpenAI from "openai";
2
2
  import dotenv from "dotenv";
3
- dotenv.config();
3
+ dotenv.config({ quiet: true });
4
4
  const openai = new OpenAI({
5
5
  apiKey: process.env.QUACKSTACK_OPENAI_API_KEY,
6
6
  });
@@ -0,0 +1,207 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+ import os from "os";
4
+ import { search } from "../commands/search.js";
5
+ export async function generateCodebaseDoc(projectName) {
6
+ console.log("🔍 Analyzing your codebase...\n");
7
+ const queries = [
8
+ "What is the overall architecture and design patterns used?",
9
+ "What are the main entry points and how does the application start?",
10
+ "What are the core features and functionalities?",
11
+ "What external APIs, services, and dependencies are used?",
12
+ "What is the database schema and data flow?",
13
+ "What are the key algorithms or business logic implementations?",
14
+ ];
15
+ let doc = `# ${projectName} - Codebase Documentation\n\n`;
16
+ doc += `**Auto-generated by QuackStack** | Last updated: ${new Date().toLocaleString()}\n\n`;
17
+ doc += `This document provides a high-level overview of the codebase architecture, key components, and design decisions.\n\n`;
18
+ doc += `---\n\n`;
19
+ doc += `## 📋 Table of Contents\n\n`;
20
+ queries.forEach((q, i) => {
21
+ const anchor = q.toLowerCase().replace(/[^a-z0-9]+/g, '-');
22
+ doc += `${i + 1}. [${q}](#${anchor})\n`;
23
+ });
24
+ doc += `\n---\n\n`;
25
+ for (const query of queries) {
26
+ try {
27
+ const { answer, sources } = await search(query, projectName);
28
+ doc += `## ${query}\n\n`;
29
+ doc += `${answer}\n\n`;
30
+ if (sources.length > 0) {
31
+ doc += `### 📁 Key Files\n\n`;
32
+ sources.slice(0, 5).forEach(s => {
33
+ doc += `- \`${s.filePath}\``;
34
+ if (s.functionName) {
35
+ doc += ` - ${s.functionName}`;
36
+ }
37
+ doc += `\n`;
38
+ });
39
+ doc += `\n`;
40
+ }
41
+ doc += `---\n\n`;
42
+ }
43
+ catch (e) {
44
+ console.error(`Error analyzing: ${query}`);
45
+ }
46
+ }
47
+ doc += `## 📂 Project Structure\n\n`;
48
+ doc += '```\n';
49
+ doc += await getProjectStructure(process.cwd());
50
+ doc += '```\n\n';
51
+ doc += `---\n\n`;
52
+ doc += `## 🚀 Getting Started\n\n`;
53
+ doc += `### Prerequisites\n`;
54
+ doc += `Check \`package.json\` for dependencies and Node.js version requirements.\n\n`;
55
+ doc += `### Installation\n`;
56
+ doc += '```bash\n';
57
+ doc += 'npm install\n';
58
+ doc += '```\n\n';
59
+ doc += `### Running the Project\n`;
60
+ doc += `Refer to \`package.json\` scripts section for available commands.\n\n`;
61
+ doc += `---\n\n`;
62
+ doc += `## 🔄 Updating This Document\n\n`;
63
+ doc += `This documentation is auto-generated. To regenerate:\n\n`;
64
+ doc += '```bash\n';
65
+ doc += 'quack --docs\n';
66
+ doc += '```\n\n';
67
+ doc += `For interactive codebase Q&A, run \`quack\` in your terminal.\n`;
68
+ const docPath = path.join(process.cwd(), "CODEBASE.md");
69
+ fs.writeFileSync(docPath, doc, "utf-8");
70
+ console.log(`✅ Generated ${docPath}`);
71
+ }
72
+ export async function generateContextFiles(projectName) {
73
+ console.log("🔍 Generating context for AI coding assistants...\n");
74
+ const queries = [
75
+ "What is the overall architecture of this project?",
76
+ "What are the main entry points and how does the code flow?",
77
+ "What are the key functions and classes?",
78
+ "What external dependencies and APIs are used?",
79
+ "What is the project structure and organization?",
80
+ ];
81
+ let baseContext = `# ${projectName} - Codebase Context\n\n`;
82
+ baseContext += `Generated: ${new Date().toISOString()}\n\n`;
83
+ baseContext += "This file is auto-generated by QuackStack to provide AI assistants with codebase context.\n\n";
84
+ baseContext += "---\n\n";
85
+ for (const query of queries) {
86
+ try {
87
+ const { answer, sources } = await search(query, projectName);
88
+ baseContext += `## ${query}\n\n${answer}\n\n`;
89
+ if (sources.length > 0) {
90
+ baseContext += "**Key files:**\n";
91
+ sources.slice(0, 3).forEach(s => {
92
+ baseContext += `- ${s.filePath}\n`;
93
+ });
94
+ baseContext += "\n";
95
+ }
96
+ }
97
+ catch (e) {
98
+ console.error(`Error generating context for: ${query}`);
99
+ }
100
+ }
101
+ baseContext += "---\n\n## Project Structure\n\n";
102
+ baseContext += await getProjectStructure(process.cwd());
103
+ await generateCursorRules(baseContext);
104
+ await generateWindsurfContext(baseContext);
105
+ await generateClineContext(baseContext);
106
+ await generateContinueContext(baseContext);
107
+ await generateAiderContext(baseContext);
108
+ console.log("\n✅ Context files generated for:");
109
+ console.log(" - Cursor (.cursorrules)");
110
+ console.log(" - Windsurf (.windsurfrules)");
111
+ console.log(" - Cline (.clinerules)");
112
+ console.log(" - Continue (.continuerules)");
113
+ console.log(" - Aider (.aider.conf.yml)");
114
+ console.log("\n💡 Your AI coding assistant will now have full codebase context!");
115
+ }
116
+ async function generateCursorRules(context) {
117
+ const cursorPath = path.join(process.cwd(), ".cursorrules");
118
+ fs.writeFileSync(cursorPath, context, "utf-8");
119
+ }
120
+ async function generateWindsurfContext(context) {
121
+ const windsurfPath = path.join(process.cwd(), ".windsurfrules");
122
+ fs.writeFileSync(windsurfPath, context, "utf-8");
123
+ }
124
+ async function generateClineContext(context) {
125
+ const clinePath = path.join(process.cwd(), ".clinerules");
126
+ fs.writeFileSync(clinePath, context, "utf-8");
127
+ }
128
+ async function generateContinueContext(context) {
129
+ const continueDir = path.join(process.cwd(), ".continue");
130
+ if (!fs.existsSync(continueDir)) {
131
+ fs.mkdirSync(continueDir, { recursive: true });
132
+ }
133
+ const continuePath = path.join(continueDir, "context.md");
134
+ fs.writeFileSync(continuePath, context, "utf-8");
135
+ }
136
+ async function generateAiderContext(context) {
137
+ const aiderPath = path.join(process.cwd(), ".aider.conf.yml");
138
+ const aiderConfig = `# Aider configuration with QuackStack context
139
+ # Project: ${path.basename(process.cwd())}
140
+
141
+ # Context file
142
+ read:
143
+ - .aider.context.md
144
+
145
+ # Model settings
146
+ model: gpt-4o-mini
147
+ edit-format: whole
148
+ `;
149
+ fs.writeFileSync(aiderPath, aiderConfig, "utf-8");
150
+ fs.writeFileSync(path.join(process.cwd(), ".aider.context.md"), context, "utf-8");
151
+ }
152
+ export async function updateGlobalContext(projectName) {
153
+ const quackDir = path.join(os.homedir(), ".quackstack");
154
+ if (!fs.existsSync(quackDir)) {
155
+ fs.mkdirSync(quackDir, { recursive: true });
156
+ }
157
+ const contextPath = path.join(quackDir, "contexts.json");
158
+ let contexts = {};
159
+ if (fs.existsSync(contextPath)) {
160
+ contexts = JSON.parse(fs.readFileSync(contextPath, "utf-8"));
161
+ }
162
+ const { answer, sources } = await search("Give a brief overview of what this project does and its main components", projectName);
163
+ contexts[projectName] = {
164
+ path: process.cwd(),
165
+ overview: answer,
166
+ topFiles: sources.slice(0, 5).map(s => s.filePath),
167
+ lastUpdated: new Date().toISOString(),
168
+ };
169
+ fs.writeFileSync(contextPath, JSON.stringify(contexts, null, 2), "utf-8");
170
+ }
171
+ export function watchAndUpdateContext(projectName) {
172
+ let timeout;
173
+ fs.watch(process.cwd(), { recursive: true }, (eventType, filename) => {
174
+ if (!filename || filename.includes("node_modules") || filename.includes(".git")) {
175
+ return;
176
+ }
177
+ clearTimeout(timeout);
178
+ timeout = setTimeout(async () => {
179
+ console.log("📝 Detected changes, updating context...");
180
+ await generateContextFiles(projectName);
181
+ }, 5000);
182
+ });
183
+ console.log("👀 Watching for file changes...");
184
+ }
185
+ async function getProjectStructure(dir, prefix = "", maxDepth = 3, currentDepth = 0) {
186
+ if (currentDepth >= maxDepth)
187
+ return "";
188
+ let structure = "";
189
+ const ignoreDirs = ["node_modules", ".git", "dist", "build", ".next"];
190
+ try {
191
+ const entries = fs.readdirSync(dir, { withFileTypes: true })
192
+ .filter(e => !ignoreDirs.includes(e.name))
193
+ .slice(0, 20);
194
+ entries.forEach((entry, index) => {
195
+ const isLast = index === entries.length - 1;
196
+ const connector = isLast ? "└── " : "├── ";
197
+ structure += `${prefix}${connector}${entry.name}\n`;
198
+ if (entry.isDirectory() && currentDepth < maxDepth - 1) {
199
+ const newPrefix = prefix + (isLast ? " " : "│ ");
200
+ structure += getProjectStructure(path.join(dir, entry.name), newPrefix, maxDepth, currentDepth + 1);
201
+ }
202
+ });
203
+ }
204
+ catch (e) {
205
+ }
206
+ return structure;
207
+ }
@@ -1,6 +1,6 @@
1
1
  import OpenAI from "openai";
2
2
  import dotenv from "dotenv";
3
- dotenv.config();
3
+ dotenv.config({ quiet: true });
4
4
  const openai = new OpenAI({
5
5
  apiKey: process.env.QUACKSTACK_OPENAI_API_KEY,
6
6
  });
@@ -0,0 +1,66 @@
1
+ import crypto from "crypto";
2
+ export class LocalEmbeddings {
3
+ idf = new Map();
4
+ documents = [];
5
+ tokenize(text) {
6
+ return text
7
+ .toLowerCase()
8
+ .replace(/[^a-z0-9_\s]/g, " ")
9
+ .split(/\s+/)
10
+ .filter(t => t.length > 2);
11
+ }
12
+ computeTF(tokens) {
13
+ const tf = new Map();
14
+ tokens.forEach(token => {
15
+ tf.set(token, (tf.get(token) || 0) + 1);
16
+ });
17
+ tokens.forEach(token => {
18
+ tf.set(token, tf.get(token) / tokens.length);
19
+ });
20
+ return tf;
21
+ }
22
+ computeIDF(documents) {
23
+ const docCount = documents.length;
24
+ const termDocCount = new Map();
25
+ documents.forEach(doc => {
26
+ const uniqueTerms = new Set(doc);
27
+ uniqueTerms.forEach(term => {
28
+ termDocCount.set(term, (termDocCount.get(term) || 0) + 1);
29
+ });
30
+ });
31
+ termDocCount.forEach((count, term) => {
32
+ this.idf.set(term, Math.log(docCount / count));
33
+ });
34
+ }
35
+ addDocuments(docs) {
36
+ this.documents = docs;
37
+ const tokenizedDocs = docs.map(d => this.tokenize(d));
38
+ this.computeIDF(tokenizedDocs);
39
+ }
40
+ getVector(text) {
41
+ const tokens = this.tokenize(text);
42
+ const tf = this.computeTF(tokens);
43
+ const vector = [];
44
+ const allTerms = Array.from(this.idf.keys());
45
+ allTerms.forEach(term => {
46
+ const tfValue = tf.get(term) || 0;
47
+ const idfValue = this.idf.get(term) || 0;
48
+ vector.push(tfValue * idfValue);
49
+ });
50
+ return vector;
51
+ }
52
+ cosineSimilarity(a, b) {
53
+ let dot = 0, normA = 0, normB = 0;
54
+ const len = Math.min(a.length, b.length);
55
+ for (let i = 0; i < len; i++) {
56
+ dot += a[i] * b[i];
57
+ normA += a[i] * a[i];
58
+ normB += b[i] * b[i];
59
+ }
60
+ return dot / (Math.sqrt(normA) * Math.sqrt(normB) || 1);
61
+ }
62
+ hash(text) {
63
+ return crypto.createHash("md5").update(text).digest("hex").slice(0, 16);
64
+ }
65
+ }
66
+ export const localEmbeddings = new LocalEmbeddings();
@@ -0,0 +1,66 @@
1
+ import crypto from "crypto";
2
+ export class LocalEmbeddings {
3
+ idf = new Map();
4
+ documents = [];
5
+ tokenize(text) {
6
+ return text
7
+ .toLowerCase()
8
+ .replace(/[^a-z0-9_\s]/g, " ")
9
+ .split(/\s+/)
10
+ .filter(t => t.length > 2);
11
+ }
12
+ computeTF(tokens) {
13
+ const tf = new Map();
14
+ tokens.forEach(token => {
15
+ tf.set(token, (tf.get(token) || 0) + 1);
16
+ });
17
+ tokens.forEach(token => {
18
+ tf.set(token, tf.get(token) / tokens.length);
19
+ });
20
+ return tf;
21
+ }
22
+ computeIDF(documents) {
23
+ const docCount = documents.length;
24
+ const termDocCount = new Map();
25
+ documents.forEach(doc => {
26
+ const uniqueTerms = new Set(doc);
27
+ uniqueTerms.forEach(term => {
28
+ termDocCount.set(term, (termDocCount.get(term) || 0) + 1);
29
+ });
30
+ });
31
+ termDocCount.forEach((count, term) => {
32
+ this.idf.set(term, Math.log(docCount / count));
33
+ });
34
+ }
35
+ addDocuments(docs) {
36
+ this.documents = docs;
37
+ const tokenizedDocs = docs.map(d => this.tokenize(d));
38
+ this.computeIDF(tokenizedDocs);
39
+ }
40
+ getVector(text) {
41
+ const tokens = this.tokenize(text);
42
+ const tf = this.computeTF(tokens);
43
+ const vector = [];
44
+ const allTerms = Array.from(this.idf.keys());
45
+ allTerms.forEach(term => {
46
+ const tfValue = tf.get(term) || 0;
47
+ const idfValue = this.idf.get(term) || 0;
48
+ vector.push(tfValue * idfValue);
49
+ });
50
+ return vector;
51
+ }
52
+ cosineSimilarity(a, b) {
53
+ let dot = 0, normA = 0, normB = 0;
54
+ const len = Math.min(a.length, b.length);
55
+ for (let i = 0; i < len; i++) {
56
+ dot += a[i] * b[i];
57
+ normA += a[i] * a[i];
58
+ normB += b[i] * b[i];
59
+ }
60
+ return dot / (Math.sqrt(normA) * Math.sqrt(normB) || 1);
61
+ }
62
+ hash(text) {
63
+ return crypto.createHash("md5").update(text).digest("hex").slice(0, 16);
64
+ }
65
+ }
66
+ export const localEmbeddings = new LocalEmbeddings();
@@ -0,0 +1,56 @@
1
/**
 * Persists the REPL's question/answer history to per-project JSON files
 * under ~/.quackstack/sessions, one file per session.
 */
export class SessionManager {
    projectName;
    // Filesystem-safe session identifier derived from the start timestamp.
    sessionId;
    conversations = [];
    constructor(projectName) {
        this.projectName = projectName;
        // FIX: a raw ISO timestamp contains ":" and ".", which are invalid in
        // Windows file names and broke saveToDatabase there. Dashes keep the
        // id lexicographically sortable in chronological order.
        this.sessionId = new Date().toISOString().replace(/[:.]/g, "-");
    }
    /**
     * Record one Q/A exchange and persist the whole session to disk.
     * @param {string} query    user question
     * @param {string} answer   model answer
     * @param {string} provider which backend produced the answer
     */
    async addConversation(query, answer, provider) {
        this.conversations.push({
            query,
            answer,
            provider,
            timestamp: new Date(),
        });
        await this.saveToDatabase();
    }
    /**
     * Write the conversation array to
     * ~/.quackstack/sessions/<project>-<sessionId>.json, creating the
     * directory on first use. (Despite the name, storage is a flat file.)
     */
    async saveToDatabase() {
        const fs = await import("fs");
        const path = await import("path");
        const os = await import("os");
        const sessionDir = path.join(os.homedir(), ".quackstack", "sessions");
        if (!fs.existsSync(sessionDir)) {
            fs.mkdirSync(sessionDir, { recursive: true });
        }
        // FIX: removed the dead `sessionData` object the old code built but
        // never wrote; the file content (the conversations array) is unchanged.
        const sessionFile = path.join(sessionDir, `${this.projectName}-${this.sessionId}.json`);
        fs.writeFileSync(sessionFile, JSON.stringify(this.conversations, null, 2));
    }
    /**
     * Render the session as plain text for prompt context.
     * @returns {string} "Q/A/(via provider)" entries separated by "---".
     */
    getConversationHistory() {
        return this.conversations
            .map(c => `Q: ${c.query}\nA: ${c.answer}\n(via ${c.provider})\n`)
            .join("\n---\n\n");
    }
    /**
     * Load the newest previous session for this project, if any.
     * NOTE: timestamps deserialize as ISO strings, not Date objects.
     */
    async loadPreviousSession() {
        const fs = await import("fs");
        const path = await import("path");
        const os = await import("os");
        const sessionDir = path.join(os.homedir(), ".quackstack", "sessions");
        if (!fs.existsSync(sessionDir))
            return;
        // FIX: match "<project>-" exactly so project "app" no longer picks up
        // "app2-*" session files.
        const files = fs.readdirSync(sessionDir)
            .filter(f => f.startsWith(`${this.projectName}-`))
            .sort()
            .reverse();
        if (files.length === 0)
            return;
        try {
            const latestSession = path.join(sessionDir, files[0]);
            this.conversations = JSON.parse(fs.readFileSync(latestSession, "utf-8"));
        }
        catch {
            // FIX: a corrupt or unreadable session file previously crashed the
            // REPL on startup; treat it as "no previous session" instead.
            this.conversations = [];
        }
    }
}
package/dist/repl.js CHANGED
@@ -1,4 +1,5 @@
1
1
  import readline from "readline";
2
+ import chalk from "chalk";
2
3
  import { ingest } from "./commands/ingest.js";
3
4
  import { search } from "./commands/search.js";
4
5
  import { client } from "./lib/database.js";
@@ -20,55 +21,59 @@ async function ensureIngested(forceReindex = false) {
20
21
  console.log("āœ… Indexing complete!\n");
21
22
  }
22
23
  }
24
/**
 * Convert inline Markdown (bold, italic, code spans, headings) into
 * chalk-styled terminal text. NOTE(review): the name is a misnomer — it
 * styles rather than strips — but it is kept for the existing caller.
 */
function stripMarkdown(text) {
    // Order matters: "**bold**" must be rewritten before "*italic*" so the
    // single-asterisk pattern cannot consume half of a double-asterisk pair.
    const rules = [
        [/\*\*(.+?)\*\*/g, chalk.bold('$1')],
        [/\*(.+?)\*/g, chalk.italic('$1')],
        [/`(.+?)`/g, chalk.cyan('$1')],
        [/^#{1,6}\s+(.+)$/gm, chalk.bold.blue('$1')],
    ];
    let styled = text;
    for (const [pattern, replacement] of rules) {
        // chalk.X('$1') yields an ANSI-wrapped literal "$1"; String.replace
        // then substitutes the capture group into it — same as the original.
        styled = styled.replace(pattern, replacement);
    }
    return styled;
}
23
31
/**
 * Interactive REPL: indexes the project if needed, then answers questions
 * about the codebase. After each answer the next line is interpreted as a
 * y/n reply to the "want more details?" follow-up.
 * @param {boolean} forceReindex re-ingest even if an index already exists
 */
export async function startREPL(forceReindex = false) {
    await ensureIngested(forceReindex);
    console.log("šŸ’” Tip: Press Ctrl+C to exit\n");
    const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout,
        terminal: true
    });
    // REPL state machine: when true, the next line answers the y/n prompt
    // instead of starting a new search.
    let waitingForDetails = false;
    let currentSources = [];
    rl.on("line", async (input) => {
        const query = input.trim();
        const normalized = query.toLowerCase();
        if (waitingForDetails) {
            waitingForDetails = false;
            if (normalized === "y" || normalized === "yes") {
                console.log("\nšŸ“š Relevant Code:\n");
                currentSources.slice(0, 3).forEach((r, i) => {
                    console.log(chalk.dim(`[${i + 1}] ${r.filePath} (relevance: ${(r.score * 100).toFixed(1)}%)`));
                    console.log(chalk.gray(r.content));
                    console.log(chalk.dim("---\n"));
                });
            }
            console.log();
            rl.prompt();
            return;
        }
        if (!query) {
            rl.prompt();
            return;
        }
        try {
            // FIX: pause input while the async search runs. Without this,
            // lines typed during the await re-enter this handler and start
            // overlapping searches / clobber waitingForDetails (the previous
            // implementation paused here too; the rewrite had dropped it).
            rl.pause();
            // FIX: search the trimmed query (the old code passed the raw,
            // untrimmed input).
            const { answer, sources } = await search(query, PROJECT_NAME);
            currentSources = sources;
            console.log("\n" + stripMarkdown(answer) + "\n");
            waitingForDetails = true;
            process.stdout.write("šŸ’” Want more details? (y/n) > ");
        }
        catch (error) {
            console.error(chalk.red("āŒ Error:"), error instanceof Error ? error.message : "Unknown error");
            rl.prompt();
        }
        finally {
            rl.resume();
        }
    });
    rl.on("close", () => {
        console.log("\nšŸ‘‹ Happy coding!");
        process.exit(0);
    });
    rl.setPrompt("🐄 Quack! How can I help? > ");
    rl.prompt();
}
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "quackstack",
3
- "version": "1.0.4",
3
+ "version": "1.0.6",
4
4
  "description": "Your cracked unpaid intern for all things codebase related! AI-powered codebase search and Q&A.",
5
5
  "type": "module",
6
6
  "main": "dist/cli.cjs",
@@ -68,6 +68,7 @@
68
68
  "chalk": "^5.6.2",
69
69
  "chalk-animation": "^2.0.3",
70
70
  "commander": "^14.0.1",
71
+ "crypto": "^1.0.1",
71
72
  "dotenv": "^17.2.3",
72
73
  "openai": "^6.0.1",
73
74
  "prisma": "^6.16.3"