quackstack 1.0.6 → 1.0.8

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
package/README.md CHANGED
@@ -2,28 +2,27 @@
2
2
 
3
3
  **Your cracked unpaid intern for all things codebase related!**
4
4
 
5
- QuackStack is an interactive CLI tool that indexes your codebase using AI embeddings and lets you ask questions about it conversationally. Perfect for understanding unfamiliar code, onboarding to new projects, or giving your AI coding assistant persistent context.
6
-
5
+ QuackStack is an interactive CLI tool that indexes your codebase using local AI embeddings and lets you ask questions about it conversationally. Perfect for understanding unfamiliar code, onboarding to new projects, or giving your AI coding assistant persistent context.
7
6
 
8
7
  ## ✨ Features
9
8
 
10
9
  * 🚀 **Zero-config** - Just run `quack` in any project directory
11
10
  * 🧠 **Smart code parsing** - Automatically extracts functions and classes
12
11
  * 💬 **Interactive REPL** - Ask questions conversationally, stays open until Ctrl+C
13
- * šŸ¤– **Multi-AI support** - Works with OpenAI, Claude, Gemini, DeepSeek, or Mistral
14
- * šŸŽÆ **Cursor integration** - Auto-generate `.cursorrules` for Cursor AI
12
+ * 🔒 **100% Local embeddings** - No API calls for vector generation, your code stays private
13
+ * 🤖 **AI-powered answers** - Uses OpenAI, Claude, Gemini, DeepSeek, or Mistral for conversational responses
14
+ * 🎯 **Universal AI tool support** - Auto-generate context for Cursor, Windsurf, Cline, Continue, and Aider
15
15
  * 📦 **Local database** - Your code stays on your infrastructure
16
16
  * 🌐 **Multi-language** - Supports JS/TS, Python, Go, Rust, Java, C/C++, C#, Ruby, PHP, Swift, Kotlin, and more
17
17
 
18
-
19
18
  ## 📦 Installation
20
19
 
21
20
  ### Global Install (Recommended)
22
21
 
23
22
  ```bash
24
- pnpm add -g quackstack
25
-
26
23
  npm install -g quackstack
24
+ # or
25
+ pnpm add -g quackstack
27
26
  ```
28
27
 
29
28
  ### Local Development
@@ -35,18 +34,18 @@ pnpm install
35
34
  pnpm build
36
35
  ```
37
36
 
38
-
39
37
  ## ⚙️ Setup
40
38
 
41
39
  ### 1. Create `.env` in your project root
42
40
 
43
41
  ```bash
44
- # REQUIRED!
42
+ # REQUIRED: Database for storing code embeddings
45
43
  QUACKSTACK_DATABASE_URL=postgresql://user:pass@host:port/dbname
46
44
 
47
- # Choose ONE AI provider:
45
+ # REQUIRED: Choose ONE AI provider for conversational answers
46
+ # (Embeddings are computed locally - no API calls!)
48
47
 
49
- # Option 1: OpenAI (RECOOMMENDED!)
48
+ # Option 1: OpenAI (RECOMMENDED)
50
49
  QUACKSTACK_OPENAI_KEY=sk-...
51
50
 
52
51
  # Option 2: Anthropic Claude
@@ -60,9 +59,6 @@ QUACKSTACK_DEEPSEEK_KEY=sk-...
60
59
 
61
60
  # Option 5: Mistral AI
62
61
  QUACKSTACK_MISTRAL_KEY=...
63
-
64
- # NOTE: If using Claude/Gemini/Mistral, you still NEED OpenAI for embeddings:
65
- QUACKSTACK_EMBEDDING_KEY=sk-...
66
62
  ```
67
63
 
68
64
  ### 2. Initialize database
@@ -72,7 +68,6 @@ npx prisma generate
72
68
  npx prisma db push
73
69
  ```
74
70
 
75
-
76
71
  ## 🚀 Usage
77
72
 
78
73
  ### Interactive Mode (Default)
@@ -80,34 +75,32 @@ npx prisma db push
80
75
  ```bash
81
76
  quack
82
77
 
83
-
84
- 🐄 Quack! How can I help? > where is authentication handled?
85
-
86
78
  # Answer appears with context
87
79
  # Press Ctrl+C to exit
88
80
  ```
89
81
 
90
- ### Generate Cursor Context
82
+ ### Generate Context for ALL AI Coding Tools
91
83
 
92
84
  ```bash
93
- quack --cursor
85
+ quack --context
94
86
 
95
- # Creates .cursorrules file with:
96
- # - Architecture overview
97
- # - Main entry points
98
- # - Key functions and files
99
- # - Project structure
87
+ # Creates context files for:
88
+ # - Cursor (.cursorrules)
89
+ # - Windsurf (.windsurfrules)
90
+ # - Cline (.clinerules)
91
+ # - Continue (.continue/context.md)
92
+ # - Aider (.aider.conf.yml)
100
93
 
101
- # Cursor AI automatically reads this file!
94
+ # Your AI coding assistants automatically read these files!
102
95
  ```
103
96
 
104
- ### Watch Mode (Auto-update Cursor)
97
+ ### Watch Mode (Auto-update Context)
105
98
 
106
99
  ```bash
107
100
  quack --watch
108
101
 
109
102
  # Watches for file changes
110
- # Auto-regenerates .cursorrules
103
+ # Auto-regenerates all context files
111
104
  # Keep running in background during development
112
105
  ```
113
106
 
@@ -119,7 +112,6 @@ quack --reindex
119
112
  # Clears old index and re-scans entire codebase
120
113
  ```
121
114
 
122
-
123
115
  ## 📖 Example Session
124
116
 
125
117
  ```bash
@@ -132,8 +124,8 @@ Welcome to QuackStack! 🐄
132
124
 
133
125
  🐄 Quack! How can I help? > how does the search function work?
134
126
 
135
- The search function converts your query to embeddings, compares them
136
- against stored code embeddings using cosine similarity, ranks results,
127
+ The search function uses local embeddings to convert your query into a vector,
128
+ compares it against stored code embeddings using cosine similarity, ranks results,
137
129
  and feeds the top matches to the AI for a conversational answer.
138
130
 
139
131
  Implementation is in src/commands/search.ts
@@ -144,18 +136,16 @@ Implementation is in src/commands/search.ts
144
136
 
145
137
  [1] src/commands/search.ts (relevance: 87.3%)
146
138
  export async function search(query: string, projectName: string) {
147
- const queryEmbedding = await aiClient.getEmbeddings(query);
148
139
  const snippets = await client.codeSnippet.findMany({
149
140
  where: { projectName },
150
141
  });
151
142
  // ... cosine similarity ranking ...
152
143
  }
153
144
 
154
-
155
145
  🐄 Quack! How can I help? > where are embeddings generated?
156
146
 
157
- Vector embeddings are generated in src/lib/ai-provider.ts using
158
- the getEmbeddings() method with OpenAI's text-embedding-3-large model.
147
+ Embeddings are generated locally using the local-embeddings module.
148
+ No API calls are made for vector generation, keeping your code private.
159
149
 
160
150
  šŸ’” Want more details? (y/n) > n
161
151
 
@@ -163,48 +153,46 @@ the getEmbeddings() method with OpenAI's text-embedding-3-large model.
163
153
  šŸ‘‹ Happy coding!
164
154
  ```
165
155
 
166
-
167
156
  ## 🛠️ How It Works
168
157
 
169
158
  1. **Scanning** - Finds all code files (ignoring `node_modules`, `.git`, etc.)
170
159
  2. **Parsing** - Uses AST parsing to extract functions/classes from JS/TS
171
- 3. **Chunking** - Breaks other languages into logical chunks
172
- 4. **Embedding** - Generates vector embeddings for each code chunk
160
+ 3. **Chunking** - Breaks code into logical chunks
161
+ 4. **Local Embedding** - Generates vector embeddings **locally** (no API calls!)
173
162
  5. **Storage** - Saves to your PostgreSQL/Neon database
174
- 6. **Search** - Semantic search + AI-powered conversational answers
175
-
163
+ 6. **Search** - Semantic search using cosine similarity + AI-powered conversational answers
176
164
 
177
165
  ## 🎯 Use Cases
178
166
 
179
167
  - **Context switching** - Quickly understand projects you haven't touched in months
180
168
  - **Onboarding** - New team members can ask questions instead of reading docs
181
169
  - **Code archaeology** - Find implementations without grepping
182
- - **AI coding assistants** - Give Cursor/Claude/ChatGPT persistent codebase context
170
+ - **AI coding assistants** - Give Cursor/Windsurf/Cline/Continue/Aider persistent codebase context
183
171
  - **Documentation** - Auto-generate explanations of how things work
184
-
172
+ - **Privacy-focused** - All embeddings generated locally, no code sent to embedding APIs
185
173
 
186
174
  ## 📋 Commands Reference
187
175
 
188
176
  | Command | Description |
189
177
  |---------|-------------|
190
178
  | `quack` | Start interactive REPL (auto-indexes first time) |
191
- | `quack --cursor` | Generate `.cursorrules` for Cursor AI |
192
- | `quack --watch` | Watch mode - auto-update Cursor context on file changes |
179
+ | `quack --context` | Generate context files for ALL AI coding tools |
180
+ | `quack --watch` | Watch mode - auto-update context on file changes |
193
181
  | `quack --reindex` | Force reindex the entire codebase |
194
-
182
+ | `quack --cursor` | [DEPRECATED] Use `--context` instead |
195
183
 
196
184
  ## 🔑 Supported AI Providers
197
185
 
198
- | Provider | Chat | Embeddings | Cost | Setup |
199
- |----------|------|------------|------|-------|
200
- | OpenAI | āœ… GPT-4o-mini | āœ… | $$ | [Get key](https://platform.openai.com/api-keys) |
201
- | Anthropic | āœ… Claude 3.5 | āŒ | $$$ | [Get key](https://console.anthropic.com/) |
202
- | Gemini | āœ… Gemini 1.5 | āŒ | FREE | [Get key](https://aistudio.google.com/app/apikey) |
203
- | DeepSeek | āœ… | āœ… | $ | [Get key](https://platform.deepseek.com/) |
204
- | Mistral | āœ… | āŒ | $$ | [Get key](https://console.mistral.ai/) |
205
-
206
- **Note:** If you use Claude, Gemini, or Mistral for chat, you still need an OpenAI or DeepSeek key for embeddings.
186
+ | Provider | Used For | Cost | Privacy | Setup |
187
+ |----------|----------|------|---------|-------|
188
+ | **Local** | Embeddings | FREE | šŸ”’ 100% Private | Built-in |
189
+ | OpenAI | Chat answers | $$ | Query only | [Get key](https://platform.openai.com/api-keys) |
190
+ | Anthropic | Chat answers | $$$ | Query only | [Get key](https://console.anthropic.com/) |
191
+ | Gemini | Chat answers | FREE | Query only | [Get key](https://aistudio.google.com/app/apikey) |
192
+ | DeepSeek | Chat answers | $ | Query only | [Get key](https://platform.deepseek.com/) |
193
+ | Mistral | Chat answers | $$ | Query only | [Get key](https://console.mistral.ai/) |
207
194
 
195
+ **Privacy Note:** QuackStack generates embeddings **locally** on your machine. Only your natural language queries and retrieved code context are sent to the AI provider for generating conversational answers. Your entire codebase is never sent to any API.
208
196
 
209
197
  ## 🗄️ Database Schema
210
198
 
@@ -212,7 +200,7 @@ the getEmbeddings() method with OpenAI's text-embedding-3-large model.
212
200
  model codeSnippet {
213
201
  id Int @id @default(autoincrement())
214
202
  content String
215
- embedding Json
203
+ embedding Json // Stored as JSON array of numbers
216
204
  filePath String
217
205
  projectName String
218
206
  language String?
@@ -228,12 +216,10 @@ model codeSnippet {
228
216
 
229
217
  Each project is isolated by `projectName` (uses current directory name).
230
218
 
231
-
232
219
  ## 🌐 Supported Languages
233
220
 
234
221
  JavaScript, TypeScript, Python, Go, Rust, Java, C, C++, C#, Ruby, PHP, Swift, Kotlin, Scala, R, Vue, Svelte
235
222
 
236
-
237
223
  ## 🎓 Development
238
224
 
239
225
  ```bash
@@ -243,24 +229,23 @@ pnpm install
243
229
 
244
230
  pnpm build
245
231
 
232
+ # Run locally
246
233
  node dist/cli.cjs
247
-
248
- node dist/cli.cjs --cursor
234
+ node dist/cli.cjs --context
249
235
  node dist/cli.cjs --watch
250
236
  ```
251
237
 
252
-
253
238
  ## 🗺️ Roadmap
254
239
 
255
- - [ ] Support more embedding providers (Cohere, Voyage AI)
256
- - [ ] Add filtering by file type, date range, author
257
- - [ ] Generate automatic codebase documentation
258
- - [ ] Export Q&A sessions as markdown docs
240
+ - [x] Local embeddings (no API calls!)
241
+ - [x] Support for all major AI coding assistants
259
242
  - [ ] VS Code extension
260
243
  - [ ] Official Cursor plugin
244
+ - [ ] Export Q&A sessions as markdown docs
245
+ - [ ] Add filtering by file type, date range, author
261
246
  - [ ] Support for code diffs and change tracking
262
247
  - [ ] Team collaboration features
263
-
248
+ - [ ] Self-hosted web UI
264
249
 
265
250
  ## 🤝 Contributing
266
251
 
@@ -269,20 +254,22 @@ Contributions welcome! Feel free to:
269
254
  - Submit feature requests
270
255
  - Open pull requests
271
256
 
272
-
273
257
  ## 📄 License
274
258
 
275
259
  MIT
276
260
 
277
-
278
261
  ## 💡 Pro Tips
279
262
 
280
- **Gemini Free Tier**: Start with Google Gemini - it's free and works great for most use cases.
263
+ **Privacy First**: Embeddings are generated locally - your code never leaves your machine during indexing.
264
+
265
+ **Gemini Free Tier**: Start with Google Gemini for chat responses - it's free and works great for most use cases.
281
266
 
282
- **DeepSeek for Production**: If you need cheap embeddings at scale, use DeepSeek (~$0.14 per million tokens).
267
+ **Universal Context**: Run `quack --context` once to generate context files for ALL major AI coding tools at once.
283
268
 
284
- **Cursor Integration**: Run `quack --cursor` once, then `quack --watch &` in the background to keep context always fresh.
269
+ **Background Watcher**: Run `quack --watch &` in the background to keep context always fresh across all your AI tools.
285
270
 
286
271
  **Multiple Projects**: Each project gets its own namespace in the database. Just run `quack` in different directories.
287
272
 
288
- **Large Codebases**: First index might take a few minutes. After that, only changed files are re-indexed.
273
+ **Large Codebases**: First index might take a few minutes. After that, only changed files are re-indexed.
274
+
275
+ **No Vendor Lock-in**: Unlike other tools, QuackStack works with Cursor, Windsurf, Cline, Continue, and Aider - choose your favorite!
@@ -0,0 +1,109 @@
1
+ import fs from "fs";
2
+ import path from "path";
3
+ import { client } from "./database.js";
4
+ export async function detectFileChanges(rootDir, projectName) {
5
+ try {
6
+ const existingSnippets = await client.codeSnippet.findMany({
7
+ where: { projectName },
8
+ select: { filePath: true, updatedAt: true },
9
+ distinct: ['filePath'],
10
+ });
11
+ if (existingSnippets.length === 0) {
12
+ return null;
13
+ }
14
+ const lastIndexTime = existingSnippets.reduce((latest, snippet) => {
15
+ return snippet.updatedAt > latest ? snippet.updatedAt : latest;
16
+ }, new Date(0));
17
+ const indexedFiles = new Set(existingSnippets.map(s => s.filePath));
18
+ const currentFiles = await scanDirectory(rootDir);
19
+ const currentFilesSet = new Set(currentFiles);
20
+ let newFiles = 0;
21
+ let modifiedFiles = 0;
22
+ let deletedFiles = 0;
23
+ for (const filePath of currentFiles) {
24
+ if (!indexedFiles.has(filePath)) {
25
+ newFiles++;
26
+ }
27
+ else {
28
+ try {
29
+ const stats = fs.statSync(filePath);
30
+ if (stats.mtime > lastIndexTime) {
31
+ modifiedFiles++;
32
+ }
33
+ }
34
+ catch (error) {
35
+ continue;
36
+ }
37
+ }
38
+ }
39
+ for (const indexedFile of indexedFiles) {
40
+ if (!currentFilesSet.has(indexedFile)) {
41
+ deletedFiles++;
42
+ }
43
+ }
44
+ const totalChanges = newFiles + modifiedFiles + deletedFiles;
45
+ return {
46
+ newFiles,
47
+ modifiedFiles,
48
+ deletedFiles,
49
+ totalChanges,
50
+ };
51
+ }
52
+ catch (error) {
53
+ console.error("Error detecting file changes:", error);
54
+ return null;
55
+ }
56
+ }
57
+ async function scanDirectory(dir) {
58
+ const files = [];
59
+ const ignoreDirs = new Set([
60
+ 'node_modules',
61
+ '.git',
62
+ 'dist',
63
+ 'build',
64
+ '.next',
65
+ 'coverage',
66
+ '.vscode',
67
+ '.idea'
68
+ ]);
69
+ function walk(currentPath) {
70
+ try {
71
+ const entries = fs.readdirSync(currentPath, { withFileTypes: true });
72
+ for (const entry of entries) {
73
+ const fullPath = path.join(currentPath, entry.name);
74
+ if (entry.isDirectory()) {
75
+ if (!ignoreDirs.has(entry.name) && !entry.name.startsWith('.')) {
76
+ walk(fullPath);
77
+ }
78
+ }
79
+ else if (entry.isFile()) {
80
+ const ext = path.extname(entry.name);
81
+ const codeExtensions = [
82
+ '.js', '.ts', '.jsx', '.tsx', '.py', '.java', '.cpp', '.c',
83
+ '.go', '.rs', '.rb', '.php', '.cs', '.swift', '.kt', '.scala'
84
+ ];
85
+ if (codeExtensions.includes(ext)) {
86
+ files.push(fullPath);
87
+ }
88
+ }
89
+ }
90
+ }
91
+ catch (error) {
92
+ }
93
+ }
94
+ walk(dir);
95
+ return files;
96
+ }
97
+ export function formatChangeMessage(stats) {
98
+ const parts = [];
99
+ if (stats.newFiles > 0) {
100
+ parts.push(`${stats.newFiles} new file${stats.newFiles > 1 ? 's' : ''}`);
101
+ }
102
+ if (stats.modifiedFiles > 0) {
103
+ parts.push(`${stats.modifiedFiles} modified file${stats.modifiedFiles > 1 ? 's' : ''}`);
104
+ }
105
+ if (stats.deletedFiles > 0) {
106
+ parts.push(`${stats.deletedFiles} deleted file${stats.deletedFiles > 1 ? 's' : ''}`);
107
+ }
108
+ return parts.join(', ');
109
+ }
package/dist/repl.js CHANGED
@@ -1,79 +1,83 @@
1
1
  import readline from "readline";
2
2
  import chalk from "chalk";
3
- import { ingest } from "./commands/ingest.js";
4
3
  import { search } from "./commands/search.js";
4
+ import { ingest } from "./commands/ingest.js";
5
5
  import { client } from "./lib/database.js";
6
6
  import path from "path";
7
+ import { detectFileChanges, formatChangeMessage } from "./lib/file-change-detector.js";
7
8
  const PROJECT_NAME = path.basename(process.cwd());
8
- async function ensureIngested(forceReindex = false) {
9
- const count = await client.codeSnippet.count({
10
- where: { projectName: PROJECT_NAME }
9
+ export async function startREPL(forceReindex = false) {
10
+ console.log(chalk.cyan("\nšŸ’” Tip: Press Ctrl+C to exit\n"));
11
+ if (!forceReindex) {
12
+ const changes = await detectFileChanges(process.cwd(), PROJECT_NAME);
13
+ if (changes && changes.totalChanges > 0) {
14
+ console.log(chalk.yellow(`\nāš ļø Detected ${changes.totalChanges} file change${changes.totalChanges > 1 ? 's' : ''} since last index:`));
15
+ console.log(chalk.yellow(` ${formatChangeMessage(changes)}`));
16
+ console.log(chalk.yellow(` Run 'quack --reindex' for best results.\n`));
17
+ const shouldReindex = await promptUser(chalk.yellow("Would you like to reindex now? (y/n) > "));
18
+ if (shouldReindex.toLowerCase() === 'y') {
19
+ forceReindex = true;
20
+ }
21
+ }
22
+ }
23
+ const existingCount = await client.codeSnippet.count({
24
+ where: { projectName: PROJECT_NAME },
11
25
  });
12
- if (count === 0 || forceReindex) {
13
- if (forceReindex && count > 0) {
14
- console.log("šŸ—‘ļø Clearing old index...");
26
+ if (existingCount === 0 || forceReindex) {
27
+ if (forceReindex) {
28
+ console.log(chalk.gray("šŸ—‘ļø Clearing old index..."));
15
29
  await client.codeSnippet.deleteMany({
16
- where: { projectName: PROJECT_NAME }
30
+ where: { projectName: PROJECT_NAME },
17
31
  });
18
32
  }
19
- console.log("šŸ” Indexing your codebase (this may take a moment)...");
33
+ console.log(chalk.gray("šŸ” Indexing your codebase (this may take a moment)..."));
20
34
  await ingest(process.cwd(), PROJECT_NAME, true);
21
- console.log("āœ… Indexing complete!\n");
35
+ console.log(chalk.green("āœ… Indexing complete!"));
22
36
  }
23
- }
24
- function stripMarkdown(text) {
25
- return text
26
- .replace(/\*\*(.+?)\*\*/g, chalk.bold('$1'))
27
- .replace(/\*(.+?)\*/g, chalk.italic('$1'))
28
- .replace(/`(.+?)`/g, chalk.cyan('$1'))
29
- .replace(/^#{1,6}\s+(.+)$/gm, chalk.bold.blue('$1'));
30
- }
31
- export async function startREPL(forceReindex = false) {
32
- await ensureIngested(forceReindex);
33
- console.log("šŸ’” Tip: Press Ctrl+C to exit\n");
34
37
  const rl = readline.createInterface({
35
38
  input: process.stdin,
36
39
  output: process.stdout,
37
- terminal: true
40
+ prompt: chalk.yellow("🐄 Quack! How can I help? > "),
38
41
  });
39
- let waitingForDetails = false;
40
- let currentSources = [];
41
- rl.on("line", async (input) => {
42
- const trimmed = input.trim().toLowerCase();
43
- if (waitingForDetails) {
44
- waitingForDetails = false;
45
- if (trimmed === "y" || trimmed === "yes") {
46
- console.log("\nšŸ“š Relevant Code:\n");
47
- currentSources.slice(0, 3).forEach((r, i) => {
48
- console.log(chalk.dim(`[${i + 1}] ${r.filePath} (relevance: ${(r.score * 100).toFixed(1)}%)`));
49
- console.log(chalk.gray(r.content));
50
- console.log(chalk.dim("---\n"));
51
- });
52
- }
53
- console.log();
54
- rl.prompt();
55
- return;
56
- }
57
- if (!trimmed) {
42
+ rl.prompt();
43
+ rl.on("line", async (line) => {
44
+ const query = line.trim();
45
+ if (!query) {
58
46
  rl.prompt();
59
47
  return;
60
48
  }
61
49
  try {
62
- const { answer, sources } = await search(input, PROJECT_NAME);
63
- currentSources = sources;
64
- console.log("\n" + stripMarkdown(answer) + "\n");
65
- waitingForDetails = true;
66
- process.stdout.write("šŸ’” Want more details? (y/n) > ");
50
+ const { answer, sources } = await search(query, PROJECT_NAME);
51
+ console.log(chalk.white(`\n${answer}\n`));
52
+ const showDetails = await promptUser(chalk.cyan("šŸ’” Want more details? (y/n) > "));
53
+ if (showDetails.toLowerCase() === "y") {
54
+ console.log(chalk.blue("\nšŸ“š Relevant Code:\n"));
55
+ sources.forEach((src, i) => {
56
+ console.log(chalk.gray(`[${i + 1}] ${src.filePath} (relevance: ${(src.score * 100).toFixed(1)}%)`));
57
+ console.log(chalk.white(src.content));
58
+ console.log(chalk.gray("\n---\n"));
59
+ });
60
+ }
67
61
  }
68
62
  catch (error) {
69
- console.error(chalk.red("āŒ Error:"), error instanceof Error ? error.message : "Unknown error");
70
- rl.prompt();
63
+ console.error(chalk.red(`\nError: ${error.message}\n`));
71
64
  }
65
+ rl.prompt();
72
66
  });
73
67
  rl.on("close", () => {
74
- console.log("\nšŸ‘‹ Happy coding!");
68
+ console.log(chalk.gray("\nšŸ‘‹ Happy coding!"));
75
69
  process.exit(0);
76
70
  });
77
- rl.setPrompt("🐄 Quack! How can I help? > ");
78
- rl.prompt();
71
+ }
72
+ function promptUser(question) {
73
+ const rl = readline.createInterface({
74
+ input: process.stdin,
75
+ output: process.stdout,
76
+ });
77
+ return new Promise((resolve) => {
78
+ rl.question(question, (answer) => {
79
+ rl.close();
80
+ resolve(answer);
81
+ });
82
+ });
79
83
  }
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "quackstack",
3
- "version": "1.0.6",
3
+ "version": "1.0.8",
4
4
  "description": "Your cracked unpaid intern for all things codebase related! AI-powered codebase search and Q&A.",
5
5
  "type": "module",
6
6
  "main": "dist/cli.cjs",