quackstack 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/LICENSE +21 -0
- package/README.md +288 -0
- package/dist/cli.cjs +41 -0
- package/dist/commands/ingest.js +42 -0
- package/dist/commands/search.js +33 -0
- package/dist/lib/ai-provider.js +146 -0
- package/dist/lib/answer.js +49 -0
- package/dist/lib/chunker.js +80 -0
- package/dist/lib/cursor-context.js +94 -0
- package/dist/lib/database.js +41 -0
- package/dist/lib/embeddings.js +15 -0
- package/dist/lib/scanner.js +57 -0
- package/dist/repl.js +70 -0
- package/dist/test/tests.js +17 -0
- package/package.json +69 -0
- package/prisma/migrations/20251002215048_init/migration.sql +19 -0
- package/prisma/migrations/20251002220417_embedding_json/migration.sql +9 -0
- package/prisma/migrations/20251002222539_code_snipper/migration.sql +28 -0
- package/prisma/migrations/migration_lock.toml +3 -0
- package/prisma/schema.prisma +30 -0
package/LICENSE
ADDED
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2025 woustachemax

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
package/README.md
ADDED
@@ -0,0 +1,288 @@
# QuackStack 🐥

**Your cracked unpaid intern for all things codebase related!**

QuackStack is an interactive CLI tool that indexes your codebase using AI embeddings and lets you ask questions about it conversationally. Perfect for understanding unfamiliar code, onboarding to new projects, or giving your AI coding assistant persistent context.

## ✨ Features

* 🚀 **Zero-config** - Just run `quack` in any project directory
* 🧠 **Smart code parsing** - Automatically extracts functions and classes
* 💬 **Interactive REPL** - Ask questions conversationally, stays open until Ctrl+C
* 🤖 **Multi-AI support** - Works with OpenAI, Claude, Gemini, DeepSeek, or Mistral
* 🎯 **Cursor integration** - Auto-generate `.cursorrules` for Cursor AI
* 📦 **Local database** - Your code stays on your infrastructure
* 🌍 **Multi-language** - Supports JS/TS, Python, Go, Rust, Java, C/C++, C#, Ruby, PHP, Swift, Kotlin, and more

## 📦 Installation

### Global Install (Recommended)

```bash
pnpm add -g quackstack

npm install -g quackstack
```

### Local Development

```bash
git clone https://github.com/woustachemax/quackstack.git
cd quackstack
pnpm install
pnpm build
```

## ⚙️ Setup

### 1. Create `.env` in your project root

```bash
# REQUIRED!
QUACKSTACK_DATABASE_URL=postgresql://user:pass@host:port/dbname

# Choose ONE AI provider:

# Option 1: OpenAI (RECOMMENDED!)
QUACKSTACK_OPENAI_KEY=sk-...

# Option 2: Anthropic Claude
QUACKSTACK_ANTHROPIC_KEY=sk-ant-...

# Option 3: Google Gemini (has free tier!)
QUACKSTACK_GEMINI_KEY=AIza...

# Option 4: DeepSeek (cheapest option)
QUACKSTACK_DEEPSEEK_KEY=sk-...

# Option 5: Mistral AI
QUACKSTACK_MISTRAL_KEY=...

# NOTE: If using Claude/Gemini/Mistral, you still NEED OpenAI for embeddings:
QUACKSTACK_EMBEDDING_KEY=sk-...
```

### 2. Initialize database

```bash
npx prisma generate
npx prisma db push
```

## 🚀 Usage

### Interactive Mode (Default)

```bash
quack

🐥 Quack! How can I help? > where is authentication handled?

# Answer appears with context
# Press Ctrl+C to exit
```

### Generate Cursor Context

```bash
quack --cursor

# Creates .cursorrules file with:
# - Architecture overview
# - Main entry points
# - Key functions and files
# - Project structure

# Cursor AI automatically reads this file!
```

### Watch Mode (Auto-update Cursor)

```bash
quack --watch

# Watches for file changes
# Auto-regenerates .cursorrules
# Keep running in background during development
```

### Force Reindex

```bash
quack --reindex

# Clears old index and re-scans entire codebase
```

## 📖 Example Session

```bash
$ quack
Welcome to QuackStack! 🐥
🔍 Indexing your codebase (this may take a moment)...
✅ Indexing complete!

💡 Tip: Press Ctrl+C to exit

🐥 Quack! How can I help? > how does the search function work?

The search function converts your query to embeddings, compares them
against stored code embeddings using cosine similarity, ranks results,
and feeds the top matches to the AI for a conversational answer.

Implementation is in src/commands/search.ts

💡 Want more details? (y/n) > y

📚 Relevant Code:

[1] src/commands/search.ts (relevance: 87.3%)
export async function search(query: string, projectName: string) {
  const queryEmbedding = await aiClient.getEmbeddings(query);
  const snippets = await client.codeSnippet.findMany({
    where: { projectName },
  });
  // ... cosine similarity ranking ...
}

🐥 Quack! How can I help? > where are embeddings generated?

Vector embeddings are generated in src/lib/ai-provider.ts using
the getEmbeddings() method with OpenAI's text-embedding-3-large model.

💡 Want more details? (y/n) > n

🐥 Quack! How can I help? > ^C
👋 Happy coding!
```

## 🛠️ How It Works

1. **Scanning** - Finds all code files (ignoring `node_modules`, `.git`, etc.)
2. **Parsing** - Uses AST parsing to extract functions/classes from JS/TS
3. **Chunking** - Breaks other languages into logical chunks
4. **Embedding** - Generates vector embeddings for each code chunk
5. **Storage** - Saves to your PostgreSQL/Neon database
6. **Search** - Semantic search + AI-powered conversational answers (sketched below)
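
Steps 4-6 boil down to embedding the question, scoring every stored chunk by cosine similarity, and handing the top matches to the chat model. A condensed TypeScript sketch of that pipeline, mirroring `dist/commands/search.js` in this package (`aiClient` is the package's provider wrapper and `client` its Prisma client; import paths are as laid out under `dist/`):

```ts
// Condensed from dist/commands/search.js in this package.
import { aiClient } from "../lib/ai-provider.js"; // configured AI provider wrapper
import { client } from "../lib/database.js";      // Prisma client for codeSnippet

function cosineSim(a: number[], b: number[]): number {
  let dot = 0, normA = 0, normB = 0;
  for (let i = 0; i < a.length; i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}

export async function search(query: string, projectName: string) {
  const queryEmbedding = await aiClient.getEmbeddings(query);                      // 4. Embedding
  const snippets = await client.codeSnippet.findMany({ where: { projectName } });  // 5. Storage
  const top = snippets
    .map(s => ({ ...s, score: cosineSim(queryEmbedding, s.embedding as number[]) }))
    .sort((a, b) => b.score - a.score)
    .slice(0, 5);                                                                  // 6. Search
  const context = top
    .map((r, i) => `[${i + 1}] ${r.filePath}\n${r.content}`)
    .join("\n\n---\n\n");
  return aiClient.generateAnswer(query, context);                                  // conversational answer
}
```

The published `dist/commands/search.js` is essentially this, plus `id`/`functionName` bookkeeping so the REPL can show its sources.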

## 🎯 Use Cases

- **Context switching** - Quickly understand projects you haven't touched in months
- **Onboarding** - New team members can ask questions instead of reading docs
- **Code archaeology** - Find implementations without grepping
- **AI coding assistants** - Give Cursor/Claude/ChatGPT persistent codebase context
- **Documentation** - Auto-generate explanations of how things work

## 📋 Commands Reference

| Command | Description |
|---------|-------------|
| `quack` | Start interactive REPL (auto-indexes first time) |
| `quack --cursor` | Generate `.cursorrules` for Cursor AI |
| `quack --watch` | Watch mode - auto-update Cursor context on file changes |
| `quack --reindex` | Force reindex the entire codebase |

## 🔑 Supported AI Providers

| Provider | Chat | Embeddings | Cost | Setup |
|----------|------|------------|------|-------|
| OpenAI | ✅ GPT-4o-mini | ✅ | $$ | [Get key](https://platform.openai.com/api-keys) |
| Anthropic | ✅ Claude 3.5 | ❌ | $$$ | [Get key](https://console.anthropic.com/) |
| Gemini | ✅ Gemini 1.5 | ❌ | FREE | [Get key](https://aistudio.google.com/app/apikey) |
| DeepSeek | ✅ | ✅ | $ | [Get key](https://platform.deepseek.com/) |
| Mistral | ✅ | ❌ | $$ | [Get key](https://console.mistral.ai/) |

**Note:** If you use Claude, Gemini, or Mistral for chat, you still need an OpenAI or DeepSeek key for embeddings.
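
That requirement comes from how the shipped client resolves keys: chat requests go through whichever provider is configured, but embedding calls only fall back across OpenAI-compatible keys. A trimmed TypeScript sketch of the lookup order in `dist/lib/ai-provider.js`:

```ts
import OpenAI from "openai";

// Trimmed from dist/lib/ai-provider.js: embeddings try the OpenAI key,
// then the dedicated embedding key, then DeepSeek's OpenAI-compatible API.
export async function getEmbeddings(text: string): Promise<number[]> {
  const openaiKey = process.env.QUACKSTACK_OPENAI_KEY || process.env.QUACKSTACK_EMBEDDING_KEY;
  if (openaiKey) {
    const client = new OpenAI({ apiKey: openaiKey });
    const res = await client.embeddings.create({ model: "text-embedding-3-large", input: text });
    return res.data[0].embedding;
  }
  if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
    const client = new OpenAI({
      apiKey: process.env.QUACKSTACK_DEEPSEEK_KEY,
      baseURL: "https://api.deepseek.com/v1",
    });
    const res = await client.embeddings.create({ model: "deepseek-chat", input: text });
    return res.data[0].embedding;
  }
  throw new Error("Set QUACKSTACK_OPENAI_KEY, QUACKSTACK_EMBEDDING_KEY, or QUACKSTACK_DEEPSEEK_KEY");
}
```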

## 🗄️ Database Schema

```prisma
model codeSnippet {
  id           Int      @id @default(autoincrement())
  content      String
  embedding    Json
  filePath     String
  projectName  String
  language     String?
  functionName String?
  lineStart    Int?
  lineEnd      Int?
  createdAt    DateTime @default(now())
  updatedAt    DateTime @updatedAt

  @@index([projectName])
}
```

Each project is isolated by `projectName` (uses current directory name).
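
Concretely, the CLI uses the working directory's basename as that namespace and scopes every query to it, so one database can hold many indexed projects side by side. A small sketch against the `codeSnippet` model above (`my-api` is just an illustrative directory name):

```ts
import path from "path";
import { PrismaClient } from "@prisma/client";

const client = new PrismaClient();

// dist/cli.cjs and dist/repl.js derive the namespace the same way:
// running quack inside ~/work/my-api indexes and queries "my-api".
const projectName = path.basename(process.cwd());

// Reads (and the --reindex cleanup) are scoped to this namespace,
// so other projects' snippets in the same database are untouched.
const snippets = await client.codeSnippet.findMany({ where: { projectName } });
console.log(`${snippets.length} chunks indexed for ${projectName}`);
```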

## 🌍 Supported Languages

JavaScript, TypeScript, Python, Go, Rust, Java, C, C++, C#, Ruby, PHP, Swift, Kotlin, Scala, R, Vue, Svelte

## 🎓 Development

```bash
git clone https://github.com/woustachemax/quackstack.git
cd quackstack
pnpm install

pnpm build

node dist/cli.cjs

node dist/cli.cjs --cursor
node dist/cli.cjs --watch
```

## 🗺️ Roadmap

- [ ] Support more embedding providers (Cohere, Voyage AI)
- [ ] Add filtering by file type, date range, author
- [ ] Generate automatic codebase documentation
- [ ] Export Q&A sessions as markdown docs
- [ ] VS Code extension
- [ ] Official Cursor plugin
- [ ] Support for code diffs and change tracking
- [ ] Team collaboration features

## 🤝 Contributing

Contributions welcome! Feel free to:
- Report bugs via [GitHub Issues](https://github.com/woustachemax/quackstack/issues)
- Submit feature requests
- Open pull requests

## 📄 License

MIT

## 💡 Pro Tips

**Gemini Free Tier**: Start with Google Gemini - it's free and works great for most use cases.

**DeepSeek for Production**: If you need cheap embeddings at scale, use DeepSeek (~$0.14 per million tokens).

**Cursor Integration**: Run `quack --cursor` once, then `quack --watch &` in the background to keep context always fresh.

**Multiple Projects**: Each project gets its own namespace in the database. Just run `quack` in different directories.

**Large Codebases**: First index might take a few minutes. After that, only changed files are re-indexed.
package/dist/cli.cjs
ADDED
@@ -0,0 +1,41 @@
#!/usr/bin/env node
"use strict";
var __importDefault = (this && this.__importDefault) || function (mod) {
    return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
const commander_1 = require("commander");
const chalk_animation_1 = __importDefault(require("chalk-animation"));
const repl_js_1 = require("./repl.js");
const cursor_context_js_1 = require("./lib/cursor-context.js");
const path_1 = __importDefault(require("path"));
const program = new commander_1.Command();
const PROJECT_NAME = path_1.default.basename(process.cwd());
program
    .name("quackstack")
    .description("Your cracked unpaid intern for all things codebase!")
    .version("1.0.0")
    .option("-r, --reindex", "Force reindex the codebase")
    .option("-c, --cursor", "Generate .cursorrules file for Cursor AI")
    .option("-w, --watch", "Watch mode: auto-update Cursor context on file changes")
    .action(async (options) => {
    const title = chalk_animation_1.default.rainbow("Welcome to QuackStack! 🐥\n");
    await new Promise(res => setTimeout(res, 1500));
    title.stop();
    if (options.cursor) {
        console.log("🔍 Generating Cursor context...\n");
        await (0, cursor_context_js_1.generateCursorContext)(PROJECT_NAME);
        await (0, cursor_context_js_1.updateCursorGlobalContext)(PROJECT_NAME);
        console.log("\n✅ Cursor integration complete!");
        console.log("💡 Cursor will now have context about your codebase");
        process.exit(0);
    }
    if (options.watch) {
        console.log("👀 Starting watch mode for Cursor context...\n");
        await (0, cursor_context_js_1.generateCursorContext)(PROJECT_NAME);
        (0, cursor_context_js_1.watchAndUpdateCursor)(PROJECT_NAME);
        await new Promise(() => { });
    }
    await (0, repl_js_1.startREPL)(options.reindex);
});
program.parse();

package/dist/commands/ingest.js
ADDED
@@ -0,0 +1,42 @@
import fs from "fs";
import path from "path";
import { scanDir } from "../lib/scanner.js";
import { chunkCode } from "../lib/chunker.js";
import { aiClient } from "../lib/ai-provider.js";
import { saveToDB } from "../lib/database.js";
export async function ingest(rootDir, projectName, silent = false) {
    if (!silent)
        console.log("Starting ingestion...");
    const files = await scanDir(rootDir);
    if (!silent)
        console.log(`Found ${files.length} files to process`);
    let processedCount = 0;
    for (const filePath of files) {
        try {
            const content = fs.readFileSync(filePath, "utf-8");
            const chunks = chunkCode(content, filePath);
            for (const chunk of chunks) {
                const embedding = await aiClient.getEmbeddings(chunk.content);
                await saveToDB({
                    content: chunk.content,
                    embedding,
                    filePath,
                    projectName,
                    language: path.extname(filePath),
                    functionName: chunk.functionName,
                    lineStart: chunk.lineStart,
                    lineEnd: chunk.lineEnd,
                });
            }
            processedCount++;
            if (!silent && processedCount % 10 === 0) {
                console.log(`Processed ${processedCount}/${files.length} files...`);
            }
        }
        catch (error) {
            console.error(`Error processing ${filePath}:`, error);
        }
    }
    if (!silent)
        console.log(`Done! Processed ${processedCount} files.`);
}

package/dist/commands/search.js
ADDED
@@ -0,0 +1,33 @@
#!/usr/bin/env node
import { client } from "../lib/database.js";
import { aiClient } from "../lib/ai-provider.js";
function cosineSim(a, b) {
    let dot = 0, normA = 0, normB = 0;
    for (let i = 0; i < a.length; i++) {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    return dot / (Math.sqrt(normA) * Math.sqrt(normB));
}
export async function search(query, projectName) {
    const queryEmbedding = await aiClient.getEmbeddings(query);
    const snippets = await client.codeSnippet.findMany({
        where: { projectName },
    });
    const ranked = snippets
        .map(snippet => ({
        id: snippet.id,
        content: snippet.content,
        filePath: snippet.filePath,
        functionName: snippet.functionName,
        score: cosineSim(queryEmbedding, snippet.embedding),
    }))
        .sort((a, b) => b.score - a.score)
        .slice(0, 5);
    const context = ranked
        .map((r, i) => `[${i + 1}] ${r.filePath}${r.functionName ? ` (${r.functionName})` : ""}\n${r.content}`)
        .join("\n\n---\n\n");
    const answer = await aiClient.generateAnswer(query, context);
    return { answer, sources: ranked };
}

package/dist/lib/ai-provider.js
ADDED
@@ -0,0 +1,146 @@
import OpenAI from "openai";
import Anthropic from "@anthropic-ai/sdk";
import { GoogleGenerativeAI } from "@google/generative-ai";
import dotenv from "dotenv";
dotenv.config();
export class AIClient {
    provider;
    openai;
    anthropic;
    gemini;
    deepseek;
    mistral;
    constructor() {
        if (process.env.QUACKSTACK_OPENAI_KEY) {
            this.provider = "openai";
            this.openai = new OpenAI({
                apiKey: process.env.QUACKSTACK_OPENAI_KEY
            });
        }
        else if (process.env.QUACKSTACK_ANTHROPIC_KEY) {
            this.provider = "anthropic";
            this.anthropic = new Anthropic({
                apiKey: process.env.QUACKSTACK_ANTHROPIC_KEY
            });
        }
        else if (process.env.QUACKSTACK_GEMINI_KEY) {
            this.provider = "gemini";
            this.gemini = new GoogleGenerativeAI(process.env.QUACKSTACK_GEMINI_KEY);
        }
        else if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
            this.provider = "deepseek";
            this.deepseek = new OpenAI({
                apiKey: process.env.QUACKSTACK_DEEPSEEK_KEY,
                baseURL: "https://api.deepseek.com/v1"
            });
        }
        else if (process.env.QUACKSTACK_MISTRAL_KEY) {
            this.provider = "mistral";
            this.mistral = new OpenAI({
                apiKey: process.env.QUACKSTACK_MISTRAL_KEY,
                baseURL: "https://api.mistral.ai/v1"
            });
        }
        else {
            throw new Error("No AI API key found. Please set one of:\n" +
                "  QUACKSTACK_OPENAI_KEY (GPT-4)\n" +
                "  QUACKSTACK_ANTHROPIC_KEY (Claude)\n" +
                "  QUACKSTACK_GEMINI_KEY (Gemini)\n" +
                "  QUACKSTACK_DEEPSEEK_KEY (DeepSeek)\n" +
                "  QUACKSTACK_MISTRAL_KEY (Mistral)");
        }
    }
    async getEmbeddings(text) {
        const embeddingKey = process.env.QUACKSTACK_OPENAI_KEY ||
            process.env.QUACKSTACK_EMBEDDING_KEY;
        if (embeddingKey) {
            const client = new OpenAI({ apiKey: embeddingKey });
            const response = await client.embeddings.create({
                model: "text-embedding-3-large",
                input: text
            });
            return response.data[0].embedding;
        }
        if (process.env.QUACKSTACK_DEEPSEEK_KEY) {
            const client = new OpenAI({
                apiKey: process.env.QUACKSTACK_DEEPSEEK_KEY,
                baseURL: "https://api.deepseek.com/v1"
            });
            const response = await client.embeddings.create({
                model: "deepseek-chat",
                input: text
            });
            return response.data[0].embedding;
        }
        throw new Error("Embeddings require an API key. Set one of:\n" +
            "  QUACKSTACK_OPENAI_KEY\n" +
            "  QUACKSTACK_DEEPSEEK_KEY\n" +
            "  QUACKSTACK_EMBEDDING_KEY");
    }
    async generateAnswer(query, context) {
        const systemPrompt = "You are a helpful coding assistant. Answer questions about the codebase using the provided code snippets. Be concise and reference specific files when relevant.";
        const userPrompt = `Code context:\n\n${context}\n\nQuestion: ${query}`;
        if (this.provider === "openai" && this.openai) {
            const response = await this.openai.chat.completions.create({
                model: "gpt-4o-mini",
                messages: [
                    { role: "system", content: systemPrompt },
                    { role: "user", content: userPrompt }
                ],
                temperature: 0.3,
            });
            return response.choices[0].message.content || "No response generated.";
        }
        if (this.provider === "anthropic" && this.anthropic) {
            const response = await this.anthropic.messages.create({
                model: "claude-3-5-sonnet-20241022",
                max_tokens: 2048,
                system: systemPrompt,
                messages: [
                    { role: "user", content: userPrompt }
                ]
            });
            const textContent = response.content.find(c => c.type === "text");
            return textContent && textContent.type === "text" ? textContent.text : "No response generated.";
        }
        if (this.provider === "gemini" && this.gemini) {
            const model = this.gemini.getGenerativeModel({ model: "gemini-1.5-flash" });
            const result = await model.generateContent(`${systemPrompt}\n\n${userPrompt}`);
            return result.response.text();
        }
        if (this.provider === "deepseek" && this.deepseek) {
            const response = await this.deepseek.chat.completions.create({
                model: "deepseek-chat",
                messages: [
                    { role: "system", content: systemPrompt },
                    { role: "user", content: userPrompt }
                ],
                temperature: 0.3,
            });
            return response.choices[0].message.content || "No response generated.";
        }
        if (this.provider === "mistral" && this.mistral) {
            const response = await this.mistral.chat.completions.create({
                model: "mistral-large-latest",
                messages: [
                    { role: "system", content: systemPrompt },
                    { role: "user", content: userPrompt }
                ],
                temperature: 0.3,
            });
            return response.choices[0].message.content || "No response generated.";
        }
        throw new Error("No AI provider configured");
    }
    getProviderName() {
        const names = {
            openai: "OpenAI GPT",
            anthropic: "Anthropic Claude",
            gemini: "Google Gemini",
            deepseek: "DeepSeek",
            mistral: "Mistral AI"
        };
        return names[this.provider];
    }
}
export const aiClient = new AIClient();

package/dist/lib/answer.js
ADDED
@@ -0,0 +1,49 @@
import OpenAI from "openai";
import dotenv from "dotenv";
dotenv.config();
const openai = new OpenAI({
    apiKey: process.env.QUACKSTACK_OPENAI_API_KEY,
});
export async function generateAnswer(query, results) {
    const context = results
        .map((r, i) => `[${i + 1}] ${r.filePath}\n${r.content}`)
        .join("\n\n---\n\n");
    const response = await openai.chat.completions.create({
        model: "gpt-4o-mini",
        messages: [
            {
                role: "system",
                content: `
You are a precise and context-aware coding assistant integrated into a CLI tool.
Your job is to answer technical questions about a user's codebase using the provided file contents, function definitions, or directory structures.

Behavior:

Always reference specific files, functions, or classes when relevant (e.g., "In db/models.py, the User class...").

Be concise and factual.

Do not speculate beyond the given code snippets.

If context is missing, state clearly what's missing.

Treat user queries as about the codebase (e.g., “what does code in my db do?”) and infer meaning from filenames, docstrings, and code structure.

Example queries:

“what does code in my db do?” → Summarize DB-related modules.

“how is authentication implemented?” → Trace relevant functions/files.

“where is API routing defined?” → Identify routing logic by filename and function.
Always format your answers in markdown for clarity.`,
            },
            {
                role: "user",
                content: `Code context:\n\n${context}\n\nQuestion: ${query}`,
            },
        ],
        temperature: 0.3,
    });
    return response.choices[0].message.content;
}

package/dist/lib/chunker.js
ADDED
@@ -0,0 +1,80 @@
import * as parser from "@babel/parser";
import traverse from "@babel/traverse";
const traverseDefault = traverse.default || traverse;
export function chunkCode(code, filePath) {
    const ext = filePath.split(".").pop()?.toLowerCase();
    if (["ts", "js", "tsx", "jsx", "mjs", "cjs"].includes(ext || "")) {
        try {
            return extractJSFunctions(code);
        }
        catch (e) {
            console.warn(`Failed to parse ${filePath}, using simple chunking`);
        }
    }
    return simpleChunk(code);
}
function extractJSFunctions(code) {
    const chunks = [];
    try {
        const ast = parser.parse(code, {
            sourceType: "module",
            plugins: ["typescript", "jsx"],
            errorRecovery: true,
        });
        traverseDefault(ast, {
            FunctionDeclaration(path) {
                if (path.node.start !== null && path.node.end !== null) {
                    chunks.push({
                        content: code.slice(path.node.start, path.node.end),
                        functionName: path.node.id?.name,
                        lineStart: path.node.loc?.start.line,
                        lineEnd: path.node.loc?.end.line,
                    });
                }
            },
            ClassDeclaration(path) {
                if (path.node.start !== null && path.node.end !== null) {
                    chunks.push({
                        content: code.slice(path.node.start, path.node.end),
                        functionName: path.node.id?.name,
                        lineStart: path.node.loc?.start.line,
                        lineEnd: path.node.loc?.end.line,
                    });
                }
            },
            ArrowFunctionExpression(path) {
                if (path.parent.type === "VariableDeclarator" &&
                    path.node.start !== null &&
                    path.node.end !== null) {
                    const varDeclarator = path.parent;
                    const functionName = varDeclarator.id.type === "Identifier"
                        ? varDeclarator.id.name
                        : undefined;
                    chunks.push({
                        content: code.slice(path.node.start, path.node.end),
                        functionName,
                        lineStart: path.node.loc?.start.line,
                        lineEnd: path.node.loc?.end.line,
                    });
                }
            },
        });
    }
    catch (e) {
        return simpleChunk(code);
    }
    return chunks.length > 0 ? chunks : simpleChunk(code);
}
function simpleChunk(code, maxLines = 50) {
    const lines = code.split("\n");
    const chunks = [];
    for (let i = 0; i < lines.length; i += maxLines) {
        const chunk = lines.slice(i, i + maxLines).join("\n");
        chunks.push({
            content: chunk,
            lineStart: i + 1,
            lineEnd: Math.min(i + maxLines, lines.length),
        });
    }
    return chunks;
}

package/dist/lib/cursor-context.js
ADDED
@@ -0,0 +1,94 @@
import fs from "fs";
import path from "path";
import os from "os";
import { search } from "../commands/search.js";
export async function generateCursorContext(projectName) {
    const cursorRulesPath = path.join(process.cwd(), ".cursorrules");
    const queries = [
        "What is the overall architecture of this project?",
        "What are the main entry points and how does the code flow?",
        "What are the key functions and classes?",
        "What external dependencies and APIs are used?",
    ];
    let context = "# QuackStack Codebase Context\n\n";
    context += "This file is auto-generated by QuackStack to provide AI assistants with codebase context.\n\n";
    context += "---\n\n";
    for (const query of queries) {
        try {
            const { answer, sources } = await search(query, projectName);
            context += `## ${query}\n\n${answer}\n\n`;
            if (sources.length > 0) {
                context += "**Key files:**\n";
                sources.slice(0, 3).forEach(s => {
                    context += `- ${s.filePath}\n`;
                });
                context += "\n";
            }
        }
        catch (e) {
            console.error(`Error generating context for: ${query}`);
        }
    }
    context += "---\n\n## Project Structure\n\n";
    context += await getProjectStructure(process.cwd());
    fs.writeFileSync(cursorRulesPath, context, "utf-8");
    console.log(`✅ Generated .cursorrules at ${cursorRulesPath}`);
    return cursorRulesPath;
}
export async function updateCursorGlobalContext(projectName) {
    const cursorDir = path.join(os.homedir(), ".cursor");
    if (!fs.existsSync(cursorDir)) {
        fs.mkdirSync(cursorDir, { recursive: true });
    }
    const globalContextPath = path.join(cursorDir, "quackstack-contexts.json");
    let contexts = {};
    if (fs.existsSync(globalContextPath)) {
        contexts = JSON.parse(fs.readFileSync(globalContextPath, "utf-8"));
    }
    const { answer, sources } = await search("Give a brief overview of what this project does and its main components", projectName);
    contexts[projectName] = {
        path: process.cwd(),
        overview: answer,
        topFiles: sources.slice(0, 5).map(s => s.filePath),
        lastUpdated: new Date().toISOString(),
    };
    fs.writeFileSync(globalContextPath, JSON.stringify(contexts, null, 2), "utf-8");
    console.log(`✅ Updated global Cursor context`);
}
async function getProjectStructure(dir, prefix = "", maxDepth = 3, currentDepth = 0) {
    if (currentDepth >= maxDepth)
        return "";
    let structure = "";
    const ignoreDirs = ["node_modules", ".git", "dist", "build", ".next"];
    try {
        const entries = fs.readdirSync(dir, { withFileTypes: true })
            .filter(e => !ignoreDirs.includes(e.name))
            .slice(0, 20);
        entries.forEach((entry, index) => {
            const isLast = index === entries.length - 1;
            const connector = isLast ? "└── " : "├── ";
            structure += `${prefix}${connector}${entry.name}\n`;
            if (entry.isDirectory() && currentDepth < maxDepth - 1) {
                const newPrefix = prefix + (isLast ? "    " : "│   ");
                structure += getProjectStructure(path.join(dir, entry.name), newPrefix, maxDepth, currentDepth + 1);
            }
        });
    }
    catch (e) {
    }
    return structure;
}
export function watchAndUpdateCursor(projectName) {
    let timeout;
    fs.watch(process.cwd(), { recursive: true }, (eventType, filename) => {
        if (!filename || filename.includes("node_modules") || filename.includes(".git")) {
            return;
        }
        clearTimeout(timeout);
        timeout = setTimeout(async () => {
            console.log("📝 Detected changes, updating Cursor context...");
            await generateCursorContext(projectName);
        }, 5000);
    });
    console.log("👀 Watching for file changes...");
}

package/dist/lib/database.js
ADDED
@@ -0,0 +1,41 @@
import { PrismaClient } from "@prisma/client";
export const client = new PrismaClient();
export const saveToDB = async (data) => {
    try {
        const result = await client.codeSnippet.create({
            data: {
                ...data,
                embedding: data.embedding,
            },
        });
        return result;
    }
    catch (e) {
        if (e instanceof Error) {
            console.error(`Error saving to DB: ${e.message}`);
        }
        else {
            console.error(`Unknown error saving to DB:`, e);
        }
        throw e;
    }
};
export const getFromDB = async (projectName) => {
    try {
        const results = await client.codeSnippet.findMany({
            where: {
                projectName: projectName
            }
        });
        return results;
    }
    catch (e) {
        if (e instanceof Error) {
            console.error(`Error fetching from DB: ${e.message}`);
        }
        else {
            console.error(`Unknown error fetching from DB:`, e);
        }
        throw e;
    }
};

package/dist/lib/embeddings.js
ADDED
@@ -0,0 +1,15 @@
import OpenAI from "openai";
import dotenv from "dotenv";
dotenv.config();
const openai = new OpenAI({
    apiKey: process.env.QUACKSTACK_OPENAI_API_KEY,
});
export async function getEmbeddings(content) {
    const response = await openai.embeddings.create({
        model: "text-embedding-3-large",
        input: content
    });
    // console.log("Embedding response:", response.data[0].embedding);
    return response.data[0].embedding;
}
// getEmbeddings("Hello, world! This is a test embedding.");

package/dist/lib/scanner.js
ADDED
@@ -0,0 +1,57 @@
import fs from "fs";
import path from "path";
const IGNORE_DIRS = [
    "node_modules",
    ".git",
    "dist",
    "build",
    "target",
    "__pycache__",
    ".next",
    ".nuxt",
    "coverage",
    ".cache",
    "vendor",
    "tmp",
    "temp",
];
const DEFAULT_EXTENSIONS = [
    ".ts", ".js", ".tsx", ".jsx", ".mjs", ".cjs",
    ".py", ".pyw",
    ".go",
    ".rs",
    ".java",
    ".c", ".cpp", ".cc", ".cxx", ".h", ".hpp", ".hxx",
    ".cs",
    ".rb", ".php", ".swift", ".kt", ".kts", ".scala", ".r",
    ".vue", ".svelte",
];
export async function scanDir(rootDir, exts = DEFAULT_EXTENSIONS) {
    const files = [];
    async function walk(dir) {
        let entries;
        try {
            entries = fs.readdirSync(dir, { withFileTypes: true });
        }
        catch (e) {
            return;
        }
        for (const entry of entries) {
            const fullPath = path.join(dir, entry.name);
            if (entry.isDirectory()) {
                if (IGNORE_DIRS.includes(entry.name)) {
                    continue;
                }
                await walk(fullPath);
            }
            else if (entry.isFile()) {
                const ext = path.extname(entry.name);
                if (exts.includes(ext)) {
                    files.push(fullPath);
                }
            }
        }
    }
    await walk(rootDir);
    return files;
}
package/dist/repl.js
ADDED
@@ -0,0 +1,70 @@
import readline from "readline";
import { ingest } from "./commands/ingest.js";
import { search } from "./commands/search.js";
import { client } from "./lib/database.js";
import path from "path";
const PROJECT_NAME = path.basename(process.cwd());
async function ensureIngested(forceReindex = false) {
    const count = await client.codeSnippet.count({
        where: { projectName: PROJECT_NAME }
    });
    if (count === 0 || forceReindex) {
        if (forceReindex && count > 0) {
            console.log("🗑️ Clearing old index...");
            await client.codeSnippet.deleteMany({
                where: { projectName: PROJECT_NAME }
            });
        }
        console.log("🔍 Indexing your codebase (this may take a moment)...");
        await ingest(process.cwd(), PROJECT_NAME, true);
        console.log("✅ Indexing complete!\n");
    }
}
export async function startREPL(forceReindex = false) {
    await ensureIngested(forceReindex);
    const rl = readline.createInterface({
        input: process.stdin,
        output: process.stdout,
        prompt: "🐥 Quack! How can I help? > "
    });
    console.log("💡 Tip: Press Ctrl+C to exit\n");
    rl.prompt();
    rl.on("line", async (input) => {
        const trimmed = input.trim();
        if (!trimmed) {
            rl.prompt();
            return;
        }
        try {
            const { answer, sources } = await search(trimmed, PROJECT_NAME);
            console.log(`\n${answer}\n`);
            await new Promise((resolve) => {
                const detailRL = readline.createInterface({
                    input: process.stdin,
                    output: process.stdout
                });
                detailRL.question("💡 Want more details? (y/n) > ", (ans) => {
                    if (ans.toLowerCase() === "y") {
                        console.log("\n📚 Relevant Code:\n");
                        sources.slice(0, 3).forEach((r, i) => {
                            console.log(`[${i + 1}] ${r.filePath} (relevance: ${(r.score * 100).toFixed(1)}%)`);
                            console.log(`${r.content}\n`);
                            console.log("---\n");
                        });
                    }
                    detailRL.close();
                    console.log();
                    resolve();
                });
            });
        }
        catch (error) {
            console.error("❌ Error:", error instanceof Error ? error.message : "Unknown error");
        }
        rl.prompt();
    });
    rl.on("close", () => {
        console.log("\n👋 Happy coding!");
        process.exit(0);
    });
}

package/dist/test/tests.js
ADDED
@@ -0,0 +1,17 @@
// import { getfromDB, saveToDB } from "../lib/database.js";
export {};
// async function main() {
//     await saveToDB({
//         content: "function hello() { console.log('hi'); }",
//         embedding: [0.012, -0.034, 0.056],
//         filePath: "src/hello.ts",
//         projectName: "test-project",
//         language: "typescript",
//         lineStart: 1,
//         lineEnd: 3,
//     });
// }
// // async function main() {
// //     await getfromDB("test-project");
// // }
// main();
package/package.json
ADDED
@@ -0,0 +1,69 @@
{
  "name": "quackstack",
  "version": "1.0.0",
  "description": "Your cracked unpaid intern for all things codebase related! AI-powered codebase search and Q&A.",
  "type": "module",
  "main": "dist/cli.cjs",
  "bin": {
    "quack": "./dist/cli.cjs"
  },
  "files": [
    "dist",
    "prisma",
    "README.md",
    "LICENSE"
  ],
  "publishConfig": {
    "access": "public"
  },
  "scripts": {
    "build": "tsc -b",
    "dev": "tsc -b && node dist/cli.cjs",
    "prepare": "prisma generate",
    "test": "echo \"Error: no test specified\" && exit 1"
  },
  "keywords": [
    "cli",
    "ai",
    "embeddings",
    "code-search",
    "codebase",
    "cursor",
    "cursor-ai",
    "semantic-search",
    "code-assistant",
    "openai",
    "claude",
    "gemini",
    "deepseek",
    "mistral"
  ],
  "repository": {
    "type": "git",
    "url": "https://github.com/woustachemax/quackstack.git"
  },
  "author": "woustachemax",
  "license": "MIT",
  "engines": {
    "node": ">=20.0.0"
  },
  "dependencies": {
    "@anthropic-ai/sdk": "^0.68.0",
    "@babel/parser": "^7.28.5",
    "@babel/traverse": "^7.28.5",
    "@google/generative-ai": "^0.24.1",
    "@prisma/client": "^6.16.3",
    "chalk": "^5.6.2",
    "chalk-animation": "^2.0.3",
    "commander": "^14.0.1",
    "dotenv": "^17.2.3",
    "openai": "^6.0.1"
  },
  "devDependencies": {
    "@types/babel__traverse": "^7.28.0",
    "@types/chalk-animation": "^1.6.3",
    "@types/node": "^20.0.0",
    "prisma": "^6.16.3",
    "typescript": "^5.9.3"
  }
}

package/prisma/migrations/20251002215048_init/migration.sql
ADDED
@@ -0,0 +1,19 @@
-- CreateTable
CREATE TABLE "CodeSnippet" (
    "id" SERIAL NOT NULL,
    "content" TEXT NOT NULL,
    "embedding" TEXT NOT NULL,
    "filePath" TEXT NOT NULL,
    "projectName" TEXT NOT NULL,
    "language" TEXT,
    "functionName" TEXT,
    "lineStart" INTEGER,
    "lineEnd" INTEGER,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "CodeSnippet_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "CodeSnippet_projectName_idx" ON "CodeSnippet"("projectName");

package/prisma/migrations/20251002220417_embedding_json/migration.sql
ADDED
@@ -0,0 +1,9 @@
/*
  Warnings:

  - Changed the type of `embedding` on the `CodeSnippet` table. No cast exists, the column would be dropped and recreated, which cannot be done if there is data, since the column is required.

*/
-- AlterTable
ALTER TABLE "CodeSnippet" DROP COLUMN "embedding",
ADD COLUMN     "embedding" JSONB NOT NULL;

package/prisma/migrations/20251002222539_code_snipper/migration.sql
ADDED
@@ -0,0 +1,28 @@
/*
  Warnings:

  - You are about to drop the `CodeSnippet` table. If the table is not empty, all the data it contains will be lost.

*/
-- DropTable
DROP TABLE "public"."CodeSnippet";

-- CreateTable
CREATE TABLE "codeSnippet" (
    "id" SERIAL NOT NULL,
    "content" TEXT NOT NULL,
    "embedding" JSONB NOT NULL,
    "filePath" TEXT NOT NULL,
    "projectName" TEXT NOT NULL,
    "language" TEXT,
    "functionName" TEXT,
    "lineStart" INTEGER,
    "lineEnd" INTEGER,
    "createdAt" TIMESTAMP(3) NOT NULL DEFAULT CURRENT_TIMESTAMP,
    "updatedAt" TIMESTAMP(3) NOT NULL,

    CONSTRAINT "codeSnippet_pkey" PRIMARY KEY ("id")
);

-- CreateIndex
CREATE INDEX "codeSnippet_projectName_idx" ON "codeSnippet"("projectName");

package/prisma/schema.prisma
ADDED
@@ -0,0 +1,30 @@
// This is your Prisma schema file,
// learn more about it in the docs: https://pris.ly/d/prisma-schema

// Looking for ways to speed up your queries, or scale easily with your serverless or edge functions?
// Try Prisma Accelerate: https://pris.ly/cli/accelerate-init

generator client {
  provider = "prisma-client-js"
}

datasource db {
  provider = "postgresql"
  url      = env("QUACKSTACK_DATABASE_URL")
}

model codeSnippet {
  id           Int      @id @default(autoincrement())
  content      String
  embedding    Json
  filePath     String
  projectName  String
  language     String?
  functionName String?
  lineStart    Int?
  lineEnd      Int?
  createdAt    DateTime @default(now())
  updatedAt    DateTime @updatedAt

  @@index([projectName])
}