wiggum-cli 0.5.4 → 0.7.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89)
  1. package/README.md +88 -22
  2. package/dist/ai/conversation/conversation-manager.d.ts +84 -0
  3. package/dist/ai/conversation/conversation-manager.d.ts.map +1 -0
  4. package/dist/ai/conversation/conversation-manager.js +159 -0
  5. package/dist/ai/conversation/conversation-manager.js.map +1 -0
  6. package/dist/ai/conversation/index.d.ts +8 -0
  7. package/dist/ai/conversation/index.d.ts.map +1 -0
  8. package/dist/ai/conversation/index.js +8 -0
  9. package/dist/ai/conversation/index.js.map +1 -0
  10. package/dist/ai/conversation/spec-generator.d.ts +62 -0
  11. package/dist/ai/conversation/spec-generator.d.ts.map +1 -0
  12. package/dist/ai/conversation/spec-generator.js +267 -0
  13. package/dist/ai/conversation/spec-generator.js.map +1 -0
  14. package/dist/ai/conversation/url-fetcher.d.ts +26 -0
  15. package/dist/ai/conversation/url-fetcher.d.ts.map +1 -0
  16. package/dist/ai/conversation/url-fetcher.js +145 -0
  17. package/dist/ai/conversation/url-fetcher.js.map +1 -0
  18. package/dist/cli.d.ts.map +1 -1
  19. package/dist/cli.js +44 -34
  20. package/dist/cli.js.map +1 -1
  21. package/dist/commands/init.d.ts +19 -0
  22. package/dist/commands/init.d.ts.map +1 -1
  23. package/dist/commands/init.js +61 -21
  24. package/dist/commands/init.js.map +1 -1
  25. package/dist/commands/new.d.ts +11 -1
  26. package/dist/commands/new.d.ts.map +1 -1
  27. package/dist/commands/new.js +102 -43
  28. package/dist/commands/new.js.map +1 -1
  29. package/dist/commands/run.js +3 -3
  30. package/dist/commands/run.js.map +1 -1
  31. package/dist/generator/config.d.ts +3 -3
  32. package/dist/generator/config.d.ts.map +1 -1
  33. package/dist/generator/config.js +5 -3
  34. package/dist/generator/config.js.map +1 -1
  35. package/dist/generator/index.js +1 -1
  36. package/dist/generator/index.js.map +1 -1
  37. package/dist/generator/writer.js +1 -1
  38. package/dist/generator/writer.js.map +1 -1
  39. package/dist/index.d.ts +1 -0
  40. package/dist/index.d.ts.map +1 -1
  41. package/dist/index.js +34 -0
  42. package/dist/index.js.map +1 -1
  43. package/dist/repl/command-parser.d.ts +84 -0
  44. package/dist/repl/command-parser.d.ts.map +1 -0
  45. package/dist/repl/command-parser.js +112 -0
  46. package/dist/repl/command-parser.js.map +1 -0
  47. package/dist/repl/index.d.ts +8 -0
  48. package/dist/repl/index.d.ts.map +1 -0
  49. package/dist/repl/index.js +8 -0
  50. package/dist/repl/index.js.map +1 -0
  51. package/dist/repl/repl-loop.d.ts +30 -0
  52. package/dist/repl/repl-loop.d.ts.map +1 -0
  53. package/dist/repl/repl-loop.js +262 -0
  54. package/dist/repl/repl-loop.js.map +1 -0
  55. package/dist/repl/session-state.d.ts +37 -0
  56. package/dist/repl/session-state.d.ts.map +1 -0
  57. package/dist/repl/session-state.js +26 -0
  58. package/dist/repl/session-state.js.map +1 -0
  59. package/dist/templates/root/README.md.tmpl +1 -1
  60. package/dist/templates/scripts/feature-loop.sh.tmpl +17 -17
  61. package/dist/templates/scripts/loop.sh.tmpl +7 -7
  62. package/dist/templates/scripts/ralph-monitor.sh.tmpl +5 -5
  63. package/dist/utils/config.d.ts +7 -7
  64. package/dist/utils/config.js +4 -4
  65. package/dist/utils/config.js.map +1 -1
  66. package/package.json +1 -1
  67. package/src/ai/conversation/conversation-manager.ts +230 -0
  68. package/src/ai/conversation/index.ts +23 -0
  69. package/src/ai/conversation/spec-generator.ts +327 -0
  70. package/src/ai/conversation/url-fetcher.ts +180 -0
  71. package/src/cli.ts +47 -34
  72. package/src/commands/init.ts +86 -22
  73. package/src/commands/new.ts +121 -44
  74. package/src/commands/run.ts +3 -3
  75. package/src/generator/config.ts +5 -3
  76. package/src/generator/index.ts +1 -1
  77. package/src/generator/writer.ts +1 -1
  78. package/src/index.ts +46 -0
  79. package/src/repl/command-parser.ts +154 -0
  80. package/src/repl/index.ts +23 -0
  81. package/src/repl/repl-loop.ts +339 -0
  82. package/src/repl/session-state.ts +63 -0
  83. package/src/templates/config/ralph.config.cjs.tmpl +38 -0
  84. package/src/templates/root/README.md.tmpl +1 -1
  85. package/src/templates/scripts/feature-loop.sh.tmpl +17 -17
  86. package/src/templates/scripts/loop.sh.tmpl +7 -7
  87. package/src/templates/scripts/ralph-monitor.sh.tmpl +5 -5
  88. package/src/utils/config.ts +9 -9
  89. /package/{src/templates/config/ralph.config.js.tmpl → dist/templates/config/ralph.config.cjs.tmpl} +0 -0
package/README.md CHANGED
@@ -32,20 +32,24 @@ npm install -g wiggum-cli
  ## Quick Start

  ```bash
- # 1. Initialize Wiggum in your project
- npx wiggum-cli init
-
- # 2. Create a new feature specification
- wiggum new my-feature
-
- # 3. Edit the spec file (opens in your editor)
- wiggum new my-feature --edit
+ # REPL-first: Just run wiggum to start interactive mode
+ wiggum
+
+ # Inside the REPL:
+ wiggum> /init              # Initialize (scans project, configures AI)
+ wiggum> /new my-feature    # Create spec with AI interview
+ wiggum> /run my-feature    # Run the development loop
+ wiggum> /exit              # Exit when done
+ ```

- # 4. Run the feature development loop
- wiggum run my-feature
+ ### CLI Mode (for scripts/CI)

- # 5. Monitor progress in real-time
- wiggum monitor my-feature
+ ```bash
+ # Traditional CLI commands still work
+ wiggum init                  # Initialize project
+ wiggum new my-feature --ai   # Create spec with AI
+ wiggum run my-feature        # Run the loop
+ wiggum monitor my-feature    # Monitor progress
  ```

  ## Commands
@@ -62,6 +66,7 @@ wiggum init [options]
  | Flag | Description |
  |------|-------------|
  | `--provider <name>` | AI provider: `anthropic`, `openai`, or `openrouter` (default: `anthropic`) |
+ | `-i, --interactive` | Stay in interactive REPL mode after initialization |
  | `-y, --yes` | Accept defaults and skip confirmations |

  **Examples:**
@@ -69,6 +74,9 @@
  # Initialize with AI analysis (interactive)
  wiggum init

+ # Initialize and enter REPL mode
+ wiggum init -i
+
  # Initialize with OpenAI provider
  wiggum init --provider openai

@@ -143,7 +151,7 @@ wiggum monitor my-feature --bash

  ### `wiggum new <feature>`

- Create a new feature specification from template.
+ Create a new feature specification from template or AI-powered interview.

  ```bash
  wiggum new <feature> [options]
@@ -152,6 +160,9 @@ wiggum new <feature> [options]
  **Options:**
  | Flag | Description |
  |------|-------------|
+ | `--ai` | Use AI interview to generate the spec |
+ | `--provider <name>` | AI provider for spec generation |
+ | `--model <model>` | Model to use for AI spec generation |
  | `-e, --edit` | Open in editor after creation |
  | `--editor <editor>` | Editor to use (defaults to `$EDITOR` or `code`) |
  | `-y, --yes` | Skip confirmation prompts |
@@ -159,7 +170,10 @@

  **Examples:**
  ```bash
- # Create a new spec with interactive prompts
+ # Create spec with AI interview (recommended)
+ wiggum new user-dashboard --ai
+
+ # Create a new spec from template
  wiggum new user-dashboard

  # Create and open in VS Code
@@ -172,13 +186,70 @@ wiggum new user-dashboard --edit --editor vim --yes
  wiggum new user-dashboard --force
  ```

+ **AI Mode (`--ai`):**
+ The AI-powered spec generation guides you through a 4-phase interview:
+ 1. **Context Gathering** - Share reference URLs or files for context
+ 2. **Goals Discussion** - Describe what you want to build
+ 3. **Interview** - AI asks clarifying questions (3-5 questions typically)
+ 4. **Generation** - AI generates a detailed, project-specific specification
+
+ ## Interactive REPL Mode (Default)
+
+ **REPL-first:** Running `wiggum` with no arguments opens the interactive REPL:
+
+ ```bash
+ wiggum
+ ```
+
+ This is the recommended way to use Wiggum - all commands are available interactively with persistent session state.
+
+ ### REPL Commands
+
+ | Command | Alias | Description |
+ |---------|-------|-------------|
+ | `/init` | `/i` | Initialize Wiggum in this project |
+ | `/new <feature>` | `/n` | Create a new feature spec (AI interview) |
+ | `/run <feature>` | `/r` | Run the feature development loop |
+ | `/monitor <feature>` | `/m` | Monitor a running feature |
+ | `/help` | `/h`, `/?` | Show available commands |
+ | `/exit` | `/q`, `/quit` | Exit the REPL |
+
+ ### Example Session
+
+ ```bash
+ $ wiggum
+
+ Wiggum Interactive Mode
+ Not initialized. Run /init to set up this project.
+
+ wiggum> /init
+ # Scans project, prompts for API key, runs AI analysis...
+
+ wiggum> /new user-dashboard
+ # AI interview starts (no --ai flag needed in REPL)...
+
+ wiggum> /run user-dashboard
+ # Development loop starts...
+
+ wiggum> /exit
+ ```
+
+ ### Benefits of REPL Mode
+
+ - **Simpler mental model** - One entry point, everything inside REPL
+ - **No flags to remember** - AI mode is default for `/new`
+ - **Persistent session** - Scan result, provider, model carry through
+ - **Discoverable commands** - `/help` always available
+
+ ---
+
  ## Generated Files Structure

  After running `wiggum init`, the following structure is created:

  ```
  .ralph/
- ├── ralph.config.js        # Main configuration file
+ ├── ralph.config.cjs       # Main configuration file
  ├── prompts/               # AI prompt templates
  │   ├── PROMPT.md          # Implementation prompt
  │   ├── PROMPT_feature.md  # Feature planning prompt
@@ -262,10 +333,10 @@ Wiggum uses a 4-phase multi-agent architecture:

  ## Configuration

- ### ralph.config.js
+ ### ralph.config.cjs

  ```javascript
- export default {
+ module.exports = {
    // Project paths
    paths: {
      root: '.ralph',
@@ -281,11 +352,6 @@ export default {
      defaultModel: 'sonnet',
    },

-   // AI settings
-   ai: {
-     provider: 'anthropic',
-   },
-
    // Detected stack (auto-populated)
    stack: {
      framework: { name: 'Next.js', version: '14.0.0' },
package/dist/ai/conversation/conversation-manager.d.ts ADDED
@@ -0,0 +1,84 @@
+ /**
+  * Conversation Manager
+  * Manages multi-turn AI conversations for spec generation
+  */
+ import { type AIProvider } from '../providers.js';
+ import type { ScanResult } from '../../scanner/types.js';
+ /**
+  * Conversation message
+  */
+ export interface ConversationMessage {
+     role: 'user' | 'assistant' | 'system';
+     content: string;
+ }
+ /**
+  * Conversation context
+  */
+ export interface ConversationContext {
+     codebaseSummary?: string;
+     references: Array<{
+         source: string;
+         content: string;
+     }>;
+ }
+ /**
+  * Conversation manager options
+  */
+ export interface ConversationManagerOptions {
+     provider: AIProvider;
+     model: string;
+     systemPrompt?: string;
+ }
+ /**
+  * Manages a multi-turn conversation with an AI model
+  */
+ export declare class ConversationManager {
+     private messages;
+     private context;
+     private readonly provider;
+     private readonly modelId;
+     private readonly systemPrompt;
+     constructor(options: ConversationManagerOptions);
+     private getDefaultSystemPrompt;
+     /**
+      * Set codebase context from scan result
+      */
+     setCodebaseContext(scanResult: ScanResult): void;
+     /**
+      * Add a reference document to the context
+      */
+     addReference(content: string, source: string): void;
+     /**
+      * Clear all references
+      */
+     clearReferences(): void;
+     /**
+      * Get the current context as a string for inclusion in prompts
+      */
+     private getContextString;
+     /**
+      * Build the full message array for the AI
+      */
+     private buildMessages;
+     /**
+      * Send a message and get a response
+      */
+     chat(userMessage: string): Promise<string>;
+     /**
+      * Send a message and stream the response
+      */
+     chatStream(userMessage: string): AsyncIterable<string>;
+     /**
+      * Get conversation history
+      */
+     getHistory(): ConversationMessage[];
+     /**
+      * Clear conversation history
+      */
+     clearHistory(): void;
+     /**
+      * Add a message to history without sending to AI
+      */
+     addToHistory(message: ConversationMessage): void;
+ }
+ //# sourceMappingURL=conversation-manager.d.ts.map
package/dist/ai/conversation/conversation-manager.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"conversation-manager.d.ts","sourceRoot":"","sources":["../../../src/ai/conversation/conversation-manager.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAGH,OAAO,EAA8B,KAAK,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAC9E,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAUzD;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,IAAI,EAAE,MAAM,GAAG,WAAW,GAAG,QAAQ,CAAC;IACtC,OAAO,EAAE,MAAM,CAAC;CACjB;AAED;;GAEG;AACH,MAAM,WAAW,mBAAmB;IAClC,eAAe,CAAC,EAAE,MAAM,CAAC;IACzB,UAAU,EAAE,KAAK,CAAC;QAAE,MAAM,EAAE,MAAM,CAAC;QAAC,OAAO,EAAE,MAAM,CAAA;KAAE,CAAC,CAAC;CACxD;AAED;;GAEG;AACH,MAAM,WAAW,0BAA0B;IACzC,QAAQ,EAAE,UAAU,CAAC;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,YAAY,CAAC,EAAE,MAAM,CAAC;CACvB;AAiCD;;GAEG;AACH,qBAAa,mBAAmB;IAC9B,OAAO,CAAC,QAAQ,CAA6B;IAC7C,OAAO,CAAC,OAAO,CAA2C;IAC1D,OAAO,CAAC,QAAQ,CAAC,QAAQ,CAAa;IACtC,OAAO,CAAC,QAAQ,CAAC,OAAO,CAAS;IACjC,OAAO,CAAC,QAAQ,CAAC,YAAY,CAAS;gBAE1B,OAAO,EAAE,0BAA0B;IAM/C,OAAO,CAAC,sBAAsB;IAM9B;;OAEG;IACH,kBAAkB,CAAC,UAAU,EAAE,UAAU,GAAG,IAAI;IAIhD;;OAEG;IACH,YAAY,CAAC,OAAO,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,IAAI;IAInD;;OAEG;IACH,eAAe,IAAI,IAAI;IAIvB;;OAEG;IACH,OAAO,CAAC,gBAAgB;IAiBxB;;OAEG;IACH,OAAO,CAAC,aAAa;IAmBrB;;OAEG;IACG,IAAI,CAAC,WAAW,EAAE,MAAM,GAAG,OAAO,CAAC,MAAM,CAAC;IAqBhD;;OAEG;IACI,UAAU,CAAC,WAAW,EAAE,MAAM,GAAG,aAAa,CAAC,MAAM,CAAC;IAwB7D;;OAEG;IACH,UAAU,IAAI,mBAAmB,EAAE;IAInC;;OAEG;IACH,YAAY,IAAI,IAAI;IAIpB;;OAEG;IACH,YAAY,CAAC,OAAO,EAAE,mBAAmB,GAAG,IAAI;CAGjD"}
package/dist/ai/conversation/conversation-manager.js ADDED
@@ -0,0 +1,159 @@
+ /**
+  * Conversation Manager
+  * Manages multi-turn AI conversations for spec generation
+  */
+ import { generateText, streamText } from 'ai';
+ import { getModel, isReasoningModel } from '../providers.js';
+ /**
+  * Format scan result into a concise codebase summary
+  */
+ function formatCodebaseSummary(scanResult) {
+     const { stack } = scanResult;
+     const parts = [];
+     if (stack.framework) {
+         parts.push(`Framework: ${stack.framework.name}${stack.framework.version ? ` v${stack.framework.version}` : ''}`);
+     }
+     if (stack.testing?.unit) {
+         parts.push(`Unit Testing: ${stack.testing.unit.name}`);
+     }
+     if (stack.testing?.e2e) {
+         parts.push(`E2E Testing: ${stack.testing.e2e.name}`);
+     }
+     if (stack.styling) {
+         parts.push(`Styling: ${stack.styling.name}`);
+     }
+     if (stack.packageManager) {
+         parts.push(`Package Manager: ${stack.packageManager.name}`);
+     }
+     return parts.join('\n');
+ }
+ /**
+  * Manages a multi-turn conversation with an AI model
+  */
+ export class ConversationManager {
+     messages = [];
+     context = { references: [] };
+     provider;
+     modelId;
+     systemPrompt;
+     constructor(options) {
+         this.provider = options.provider;
+         this.modelId = options.model;
+         this.systemPrompt = options.systemPrompt || this.getDefaultSystemPrompt();
+     }
+     getDefaultSystemPrompt() {
+         return `You are a helpful assistant that helps developers create feature specifications.
+ You ask clarifying questions to understand the user's requirements and then help generate a detailed specification.
+ Be concise but thorough. Focus on understanding the user's needs before proposing solutions.`;
+     }
+     /**
+      * Set codebase context from scan result
+      */
+     setCodebaseContext(scanResult) {
+         this.context.codebaseSummary = formatCodebaseSummary(scanResult);
+     }
+     /**
+      * Add a reference document to the context
+      */
+     addReference(content, source) {
+         this.context.references.push({ source, content });
+     }
+     /**
+      * Clear all references
+      */
+     clearReferences() {
+         this.context.references = [];
+     }
+     /**
+      * Get the current context as a string for inclusion in prompts
+      */
+     getContextString() {
+         const parts = [];
+         if (this.context.codebaseSummary) {
+             parts.push(`## Project Tech Stack\n${this.context.codebaseSummary}`);
+         }
+         if (this.context.references.length > 0) {
+             parts.push('## Reference Documents');
+             for (const ref of this.context.references) {
+                 parts.push(`### ${ref.source}\n${ref.content}`);
+             }
+         }
+         return parts.join('\n\n');
+     }
+     /**
+      * Build the full message array for the AI
+      */
+     buildMessages() {
+         const contextString = this.getContextString();
+         const fullSystemPrompt = contextString
+             ? `${this.systemPrompt}\n\n${contextString}`
+             : this.systemPrompt;
+         const aiMessages = [
+             { role: 'system', content: fullSystemPrompt },
+         ];
+         for (const msg of this.messages) {
+             if (msg.role === 'user' || msg.role === 'assistant') {
+                 aiMessages.push({ role: msg.role, content: msg.content });
+             }
+         }
+         return aiMessages;
+     }
+     /**
+      * Send a message and get a response
+      */
+     async chat(userMessage) {
+         // Add user message to history
+         this.messages.push({ role: 'user', content: userMessage });
+         const { model } = getModel(this.provider, this.modelId);
+         const messages = this.buildMessages();
+         const result = await generateText({
+             model,
+             messages,
+             ...(isReasoningModel(this.modelId) ? {} : { temperature: 0.7 }),
+         });
+         const assistantMessage = result.text;
+         // Add assistant response to history
+         this.messages.push({ role: 'assistant', content: assistantMessage });
+         return assistantMessage;
+     }
+     /**
+      * Send a message and stream the response
+      */
+     async *chatStream(userMessage) {
+         // Add user message to history
+         this.messages.push({ role: 'user', content: userMessage });
+         const { model } = getModel(this.provider, this.modelId);
+         const messages = this.buildMessages();
+         const result = streamText({
+             model,
+             messages,
+             ...(isReasoningModel(this.modelId) ? {} : { temperature: 0.7 }),
+         });
+         let fullResponse = '';
+         for await (const textPart of result.textStream) {
+             fullResponse += textPart;
+             yield textPart;
+         }
+         // Add assistant response to history
+         this.messages.push({ role: 'assistant', content: fullResponse });
+     }
+     /**
+      * Get conversation history
+      */
+     getHistory() {
+         return [...this.messages];
+     }
+     /**
+      * Clear conversation history
+      */
+     clearHistory() {
+         this.messages = [];
+     }
+     /**
+      * Add a message to history without sending to AI
+      */
+     addToHistory(message) {
+         this.messages.push(message);
+     }
+ }
+ //# sourceMappingURL=conversation-manager.js.map
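The declaration and implementation above define the full conversation API. As a rough illustration of how it fits together, here is a hypothetical usage sketch (not taken from the package docs): the provider value and model id are placeholders, the import paths assume you are inside the package's `ai/conversation` directory, and `AIProvider` is cast because its exact shape is not shown in this diff.

```typescript
// Hypothetical sketch of driving ConversationManager directly.
import { ConversationManager } from './conversation-manager.js';
import type { AIProvider } from '../providers.js';

async function demo(): Promise<void> {
  const conversation = new ConversationManager({
    provider: 'anthropic' as unknown as AIProvider, // placeholder provider value
    model: 'claude-sonnet-4',                       // placeholder model id
  });

  // Ground the interview with a reference document; it is kept in context for every turn.
  conversation.addReference('# Notes\nUsers want a dashboard with usage charts.', 'notes.md');

  // One request/response turn; history is stored inside the manager.
  const reply = await conversation.chat('Help me outline a user-dashboard feature.');
  console.log(reply);

  // Streaming turn: chunks arrive as they are generated.
  for await (const chunk of conversation.chatStream('What questions should I answer first?')) {
    process.stdout.write(chunk);
  }

  console.log(`\nMessages in history: ${conversation.getHistory().length}`);
}

demo().catch(console.error);
```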
package/dist/ai/conversation/conversation-manager.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"conversation-manager.js","sourceRoot":"","sources":["../../../src/ai/conversation/conversation-manager.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EAAE,YAAY,EAAE,UAAU,EAAE,MAAM,IAAI,CAAC;AAC9C,OAAO,EAAE,QAAQ,EAAE,gBAAgB,EAAmB,MAAM,iBAAiB,CAAC;AAoC9E;;GAEG;AACH,SAAS,qBAAqB,CAAC,UAAsB;IACnD,MAAM,EAAE,KAAK,EAAE,GAAG,UAAU,CAAC;IAE7B,MAAM,KAAK,GAAa,EAAE,CAAC;IAE3B,IAAI,KAAK,CAAC,SAAS,EAAE,CAAC;QACpB,KAAK,CAAC,IAAI,CAAC,cAAc,KAAK,CAAC,SAAS,CAAC,IAAI,GAAG,KAAK,CAAC,SAAS,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC;IACnH,CAAC;IAED,IAAI,KAAK,CAAC,OAAO,EAAE,IAAI,EAAE,CAAC;QACxB,KAAK,CAAC,IAAI,CAAC,iBAAiB,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,CAAC;IACzD,CAAC;IAED,IAAI,KAAK,CAAC,OAAO,EAAE,GAAG,EAAE,CAAC;QACvB,KAAK,CAAC,IAAI,CAAC,gBAAgB,KAAK,CAAC,OAAO,CAAC,GAAG,CAAC,IAAI,EAAE,CAAC,CAAC;IACvD,CAAC;IAED,IAAI,KAAK,CAAC,OAAO,EAAE,CAAC;QAClB,KAAK,CAAC,IAAI,CAAC,YAAY,KAAK,CAAC,OAAO,CAAC,IAAI,EAAE,CAAC,CAAC;IAC/C,CAAC;IAED,IAAI,KAAK,CAAC,cAAc,EAAE,CAAC;QACzB,KAAK,CAAC,IAAI,CAAC,oBAAoB,KAAK,CAAC,cAAc,CAAC,IAAI,EAAE,CAAC,CAAC;IAC9D,CAAC;IAED,OAAO,KAAK,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC;AAC1B,CAAC;AAED;;GAEG;AACH,MAAM,OAAO,mBAAmB;IACtB,QAAQ,GAA0B,EAAE,CAAC;IACrC,OAAO,GAAwB,EAAE,UAAU,EAAE,EAAE,EAAE,CAAC;IACzC,QAAQ,CAAa;IACrB,OAAO,CAAS;IAChB,YAAY,CAAS;IAEtC,YAAY,OAAmC;QAC7C,IAAI,CAAC,QAAQ,GAAG,OAAO,CAAC,QAAQ,CAAC;QACjC,IAAI,CAAC,OAAO,GAAG,OAAO,CAAC,KAAK,CAAC;QAC7B,IAAI,CAAC,YAAY,GAAG,OAAO,CAAC,YAAY,IAAI,IAAI,CAAC,sBAAsB,EAAE,CAAC;IAC5E,CAAC;IAEO,sBAAsB;QAC5B,OAAO;;6FAEkF,CAAC;IAC5F,CAAC;IAED;;OAEG;IACH,kBAAkB,CAAC,UAAsB;QACvC,IAAI,CAAC,OAAO,CAAC,eAAe,GAAG,qBAAqB,CAAC,UAAU,CAAC,CAAC;IACnE,CAAC;IAED;;OAEG;IACH,YAAY,CAAC,OAAe,EAAE,MAAc;QAC1C,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,IAAI,CAAC,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC,CAAC;IACpD,CAAC;IAED;;OAEG;IACH,eAAe;QACb,IAAI,CAAC,OAAO,CAAC,UAAU,GAAG,EAAE,CAAC;IAC/B,CAAC;IAED;;OAEG;IACK,gBAAgB;QACtB,MAAM,KAAK,GAAa,EAAE,CAAC;QAE3B,IAAI,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE,CAAC;YACjC,KAAK,CAAC,IAAI,CAAC,0BAA0B,IAAI,CAAC,OAAO,CAAC,eAAe,EAAE,CAAC,CAAC;QACvE,CAAC;QAED,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,CAAC,MAAM,GAAG,CAAC,EAAE,CAAC;YACvC,KAAK,CAAC,IAAI,CAAC,wBAAwB,CAAC,CAAC;YACrC,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,OAAO,CAAC,UAAU,EAAE,CAAC;gBAC1C,KAAK,CAAC,IAAI,CAAC,OAAO,GAAG,CAAC,MAAM,KAAK,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC;YAClD,CAAC;QACH,CAAC;QAED,OAAO,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC;IAC5B,CAAC;IAED;;OAEG;IACK,aAAa;QACnB,MAAM,aAAa,GAAG,IAAI,CAAC,gBAAgB,EAAE,CAAC;QAC9C,MAAM,gBAAgB,GAAG,aAAa;YACpC,CAAC,CAAC,GAAG,IAAI,CAAC,YAAY,OAAO,aAAa,EAAE;YAC5C,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC;QAEtB,MAAM,UAAU,GAAgB;YAC9B,EAAE,IAAI,EAAE,QAAQ,EAAE,OAAO,EAAE,gBAAgB,EAAE;SAC9C,CAAC;QAEF,KAAK,MAAM,GAAG,IAAI,IAAI,CAAC,QAAQ,EAAE,CAAC;YAChC,IAAI,GAAG,CAAC,IAAI,KAAK,MAAM,IAAI,GAAG,CAAC,IAAI,KAAK,WAAW,EAAE,CAAC;gBACpD,UAAU,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,GAAG,CAAC,IAAI,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,CAAC,CAAC;YAC5D,CAAC;QACH,CAAC;QAED,OAAO,UAAU,CAAC;IACpB,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,IAAI,CAAC,WAAmB;QAC5B,8BAA8B;QAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,CAAC;QAE3D,MAAM,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxD,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;QAEtC,MAAM,MAAM,GAAG,MAAM,YAAY,CAAC;YAChC,KAAK;YACL,QAAQ;YACR,GAAG,CAAC,gBAAgB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,CAAC;SAChE,CAAC,CAAC;QAEH,MAAM,gBAAgB,GAAG,MAAM,CAAC,IAAI,CAAC;QAErC,oCAAoC;QACpC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,gBAAgB,EAAE,CAAC,C
AAC;QAErE,OAAO,gBAAgB,CAAC;IAC1B,CAAC;IAED;;OAEG;IACH,KAAK,CAAC,CAAC,UAAU,CAAC,WAAmB;QACnC,8BAA8B;QAC9B,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,OAAO,EAAE,WAAW,EAAE,CAAC,CAAC;QAE3D,MAAM,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAC,IAAI,CAAC,QAAQ,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC;QACxD,MAAM,QAAQ,GAAG,IAAI,CAAC,aAAa,EAAE,CAAC;QAEtC,MAAM,MAAM,GAAG,UAAU,CAAC;YACxB,KAAK;YACL,QAAQ;YACR,GAAG,CAAC,gBAAgB,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,EAAE,WAAW,EAAE,GAAG,EAAE,CAAC;SAChE,CAAC,CAAC;QAEH,IAAI,YAAY,GAAG,EAAE,CAAC;QAEtB,IAAI,KAAK,EAAE,MAAM,QAAQ,IAAI,MAAM,CAAC,UAAU,EAAE,CAAC;YAC/C,YAAY,IAAI,QAAQ,CAAC;YACzB,MAAM,QAAQ,CAAC;QACjB,CAAC;QAED,oCAAoC;QACpC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,EAAE,IAAI,EAAE,WAAW,EAAE,OAAO,EAAE,YAAY,EAAE,CAAC,CAAC;IACnE,CAAC;IAED;;OAEG;IACH,UAAU;QACR,OAAO,CAAC,GAAG,IAAI,CAAC,QAAQ,CAAC,CAAC;IAC5B,CAAC;IAED;;OAEG;IACH,YAAY;QACV,IAAI,CAAC,QAAQ,GAAG,EAAE,CAAC;IACrB,CAAC;IAED;;OAEG;IACH,YAAY,CAAC,OAA4B;QACvC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC;IAC9B,CAAC;CACF"}
package/dist/ai/conversation/index.d.ts ADDED
@@ -0,0 +1,8 @@
+ /**
+  * Conversation Module
+  * AI-powered conversation and spec generation
+  */
+ export { ConversationManager, type ConversationMessage, type ConversationContext, type ConversationManagerOptions, } from './conversation-manager.js';
+ export { SpecGenerator, type SpecGeneratorOptions, } from './spec-generator.js';
+ export { fetchContent, fetchMultipleSources, isUrl, type FetchedContent, } from './url-fetcher.js';
+ //# sourceMappingURL=index.d.ts.map
package/dist/ai/conversation/index.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/ai/conversation/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EACL,mBAAmB,EACnB,KAAK,mBAAmB,EACxB,KAAK,mBAAmB,EACxB,KAAK,0BAA0B,GAChC,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,aAAa,EACb,KAAK,oBAAoB,GAC1B,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EACL,YAAY,EACZ,oBAAoB,EACpB,KAAK,EACL,KAAK,cAAc,GACpB,MAAM,kBAAkB,CAAC"}
package/dist/ai/conversation/index.js ADDED
@@ -0,0 +1,8 @@
+ /**
+  * Conversation Module
+  * AI-powered conversation and spec generation
+  */
+ export { ConversationManager, } from './conversation-manager.js';
+ export { SpecGenerator, } from './spec-generator.js';
+ export { fetchContent, fetchMultipleSources, isUrl, } from './url-fetcher.js';
+ //# sourceMappingURL=index.js.map
package/dist/ai/conversation/index.js.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/ai/conversation/index.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAEH,OAAO,EACL,mBAAmB,GAIpB,MAAM,2BAA2B,CAAC;AAEnC,OAAO,EACL,aAAa,GAEd,MAAM,qBAAqB,CAAC;AAE7B,OAAO,EACL,YAAY,EACZ,oBAAoB,EACpB,KAAK,GAEN,MAAM,kBAAkB,CAAC"}
package/dist/ai/conversation/spec-generator.d.ts ADDED
@@ -0,0 +1,62 @@
+ /**
+  * Spec Generator
+  * AI-powered feature specification generator with interview flow
+  */
+ import type { AIProvider } from '../providers.js';
+ import type { ScanResult } from '../../scanner/types.js';
+ /**
+  * Spec generator options
+  */
+ export interface SpecGeneratorOptions {
+     featureName: string;
+     projectRoot: string;
+     provider: AIProvider;
+     model: string;
+     scanResult?: ScanResult;
+ }
+ /**
+  * Generation phases
+  */
+ type GeneratorPhase = 'context' | 'goals' | 'interview' | 'generation' | 'complete';
+ /**
+  * AI-powered spec generator with interview flow
+  */
+ export declare class SpecGenerator {
+     private conversation;
+     private phase;
+     private readonly featureName;
+     private readonly projectRoot;
+     private generatedSpec;
+     constructor(options: SpecGeneratorOptions);
+     /**
+      * Phase 1: Gather context from URLs/files
+      */
+     private gatherContext;
+     /**
+      * Phase 2: Discuss goals
+      */
+     private discussGoals;
+     /**
+      * Phase 3: Conduct interview
+      */
+     private conductInterview;
+     /**
+      * Phase 4: Generate spec
+      */
+     private generateSpec;
+     /**
+      * Run the full spec generation flow
+      * Returns the generated spec or null if cancelled
+      */
+     run(): Promise<string | null>;
+     /**
+      * Get the generated spec
+      */
+     getSpec(): string;
+     /**
+      * Get current phase
+      */
+     getPhase(): GeneratorPhase;
+ }
+ export {};
+ //# sourceMappingURL=spec-generator.d.ts.map
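The SpecGenerator declaration above exposes a small surface: construct it with `SpecGeneratorOptions`, call `run()` to walk the four phases, then read the result. A hypothetical sketch of that flow follows; the provider and model values and the output filename are placeholders, not taken from the package docs.

```typescript
// Hypothetical sketch: generating a spec for one feature and saving it to disk.
import { writeFile } from 'node:fs/promises';
import { SpecGenerator } from './spec-generator.js';
import type { AIProvider } from '../providers.js';

async function generateSpec(): Promise<void> {
  const generator = new SpecGenerator({
    featureName: 'user-dashboard',
    projectRoot: process.cwd(),
    provider: 'anthropic' as unknown as AIProvider, // placeholder provider value
    model: 'claude-sonnet-4',                       // placeholder model id
    // scanResult is optional; passing one gives the AI the detected tech stack.
  });

  // run() walks context -> goals -> interview -> generation and resolves to the
  // finished spec, or null if the flow is cancelled (per the declaration above).
  const spec = await generator.run();
  if (spec === null) {
    console.log(`Spec generation stopped during the "${generator.getPhase()}" phase.`);
    return;
  }

  await writeFile('user-dashboard.spec.md', spec, 'utf8'); // illustrative output path
  console.log('Spec written to user-dashboard.spec.md');
}

generateSpec().catch(console.error);
```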
package/dist/ai/conversation/spec-generator.d.ts.map ADDED
@@ -0,0 +1 @@
+ {"version":3,"file":"spec-generator.d.ts","sourceRoot":"","sources":["../../../src/ai/conversation/spec-generator.ts"],"names":[],"mappings":"AAAA;;;GAGG;AAMH,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,iBAAiB,CAAC;AAClD,OAAO,KAAK,EAAE,UAAU,EAAE,MAAM,wBAAwB,CAAC;AAMzD;;GAEG;AACH,MAAM,WAAW,oBAAoB;IACnC,WAAW,EAAE,MAAM,CAAC;IACpB,WAAW,EAAE,MAAM,CAAC;IACpB,QAAQ,EAAE,UAAU,CAAC;IACrB,KAAK,EAAE,MAAM,CAAC;IACd,UAAU,CAAC,EAAE,UAAU,CAAC;CACzB;AAED;;GAEG;AACH,KAAK,cAAc,GAAG,SAAS,GAAG,OAAO,GAAG,WAAW,GAAG,YAAY,GAAG,UAAU,CAAC;AA+EpF;;GAEG;AACH,qBAAa,aAAa;IACxB,OAAO,CAAC,YAAY,CAAsB;IAC1C,OAAO,CAAC,KAAK,CAA6B;IAC1C,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAS;IACrC,OAAO,CAAC,QAAQ,CAAC,WAAW,CAAS;IACrC,OAAO,CAAC,aAAa,CAAc;gBAEvB,OAAO,EAAE,oBAAoB;IAezC;;OAEG;YACW,aAAa;IA2B3B;;OAEG;YACW,YAAY;IAgC1B;;OAEG;YACW,gBAAgB;IAgD9B;;OAEG;YACW,YAAY;IAsB1B;;;OAGG;IACG,GAAG,IAAI,OAAO,CAAC,MAAM,GAAG,IAAI,CAAC;IAkCnC;;OAEG;IACH,OAAO,IAAI,MAAM;IAIjB;;OAEG;IACH,QAAQ,IAAI,cAAc;CAG3B"}