@zibby/cli 0.1.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,664 @@
1
import { existsSync, readFileSync, writeFileSync } from 'fs';
import { join, dirname } from 'path';
import { fileURLToPath } from 'url';
import { spawnSync } from 'child_process';
import { createRequire } from 'module';
import { invokeAgent } from '@zibby/core';
import { fetchExecutionContext } from '../utils/execution-context.js';
import { reportProgress, reportFinalStatus } from '../utils/progress-reporter.js';
7
+
8
+ const __filename = fileURLToPath(import.meta.url);
9
+ const __dirname = dirname(__filename);
10
+
11
+ /**
12
+ * Implement a ticket - runs inside ECS container
13
+ *
14
+ * Environment variables expected:
15
+ * - EXECUTION_ID: Unique execution ID
16
+ * - TICKET_KEY: Jira ticket key (e.g., SCRUM-1)
17
+ * - PROJECT_ID: Zibby project ID
18
+ * - REPO_URL: GitHub repo URL
19
+ * - BRANCH: Git branch to checkout (default: main)
20
+ * - GITHUB_TOKEN: GitHub token for creating PR
21
+ */
22
export async function implementCommand(_options) {
  const {
    EXECUTION_ID,
    TICKET_KEY,
    PROJECT_ID,
    REPOS,
    MODEL
  } = process.env;

  // Validate required env vars
  if (!EXECUTION_ID || !TICKET_KEY || !PROJECT_ID) {
    console.error('❌ Missing required environment variables:');
    console.error(' EXECUTION_ID, TICKET_KEY, PROJECT_ID');
    process.exit(1);
  }

  // Fetch large data (ticketContext) from DynamoDB instead of env vars.
  // REPOS (if set) overrides the stored repo list — it is a JSON array of
  // { name, url, branch, provider, isPrimary } objects.
  const execCtx = await fetchExecutionContext(EXECUTION_ID, PROJECT_ID);
  const ticketContext = execCtx.ticketContext;
  const repos = REPOS ? JSON.parse(REPOS) : execCtx.repos;
  const primaryRepo = repos.find(r => r.isPrimary) || repos[0];

  // Raw logs will show execution details

  const workspace = process.cwd(); // /workspace (parent of all repos)
  const results = {
    status: 'running',
    steps: []
  };

  try {
    // ============================================
    // STEP 1: Start Environment
    // ============================================
    await step('Start Environment', async () => {
      // Just verify environment - no formatted output needed
      // Raw output from commands will be captured automatically
    });

    // ============================================
    // STEP 2: Clone All Repositories
    // ============================================
    await step('Clone Repositories', async () => {
      const githubToken = process.env.GITHUB_TOKEN;
      const gitlabToken = process.env.GITLAB_TOKEN || '';
      const gitlabUrl = process.env.GITLAB_URL || '';

      for (const repo of repos) {
        const repoDir = join(workspace, repo.name);

        // Use token for authentication based on provider
        let cloneUrl = repo.url;
        const isGitlab = repo.provider === 'gitlab' || (gitlabUrl && repo.url.includes(new URL(gitlabUrl).host));
        const isGithub = repo.provider === 'github' || repo.url.includes('github.com');

        if (isGithub && githubToken) {
          cloneUrl = repo.url.replace('https://github.com', `https://${githubToken}@github.com`);
        } else if (isGitlab && gitlabToken && gitlabUrl) {
          try {
            const gitlabHost = new URL(gitlabUrl).host;
            cloneUrl = repo.url.replace(`https://${gitlabHost}`, `https://oauth2:${gitlabToken}@${gitlabHost}`);
          } catch (e) {
            console.warn(`⚠️ Failed to parse GITLAB_URL: ${e.message}`);
          }
        }

        // Raw git output will be captured automatically
        execCommand(['git', 'clone', cloneUrl, repoDir], workspace);
        execCommand(['git', 'checkout', repo.branch], repoDir);

        // Create feature branch only in primary repo
        if (repo.isPrimary) {
          const featureBranch = `feature/${TICKET_KEY.toLowerCase()}`;
          execCommand(['git', 'checkout', '-b', featureBranch], repoDir);
        }
      }

      results.steps.push({ name: 'clone', status: 'success', repoCount: repos.length });
    });

    // ============================================
    // STEP 2: Use Ticket Context (from env, includes additionalContext)
    // ============================================
    const ticket = await step('Load Ticket Context', async () => {
      // Ticket context loaded - raw logs will show details
      results.steps.push({ name: 'load_ticket', status: 'success' });
      return ticketContext;
    });

    // ============================================
    // STEP 3: Install Dependencies (All Repos)
    // ============================================
    await step('Install Dependencies', async () => {
      for (const repo of repos) {
        const repoDir = join(workspace, repo.name);
        const projectInfo = detectProjectInfo(repoDir);

        // Install failures are tolerated per-repo: the AI agent may still be
        // able to work (e.g. reference-only repos), and output is captured.
        try {
          execCommand(projectInfo.installCommand, repoDir);
        } catch (_error) {
          // Install failed - error output already captured
        }
      }
      results.steps.push({ name: 'install_deps', status: 'success' });
    });

    // ============================================
    // STEP 4: Detect Dev Command
    // ============================================
    const devInfo = await step('Detect Dev Command', async () => {
      const primaryRepoDir = join(workspace, primaryRepo.name);

      // Check for docker-compose first (highest priority)
      const dockerComposePaths = [
        'docker-compose.yml',
        'docker-compose.yaml',
        'compose.yml',
        'compose.yaml'
      ];

      for (const composePath of dockerComposePaths) {
        if (existsSync(join(primaryRepoDir, composePath))) {
          // Found docker-compose - raw output will show
          results.steps.push({ name: 'detect_dev', status: 'success', command: 'docker-compose up', type: 'docker-compose' });
          return {
            command: 'docker-compose up',
            type: 'docker-compose',
            configFile: composePath
          };
        }
      }

      // Fallback to package.json scripts
      const pkgPath = join(primaryRepoDir, 'package.json');
      if (!existsSync(pkgPath)) {
        console.log(' ⚠️ No package.json or docker-compose found');
        results.steps.push({ name: 'detect_dev', status: 'skipped' });
        return null;
      }

      const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'));
      const scripts = pkg.scripts || {};

      let cmd = null;
      if (scripts.dev) cmd = 'npm run dev';
      else if (scripts.start) cmd = 'npm start';
      else if (scripts['dev:local']) cmd = 'npm run dev:local';

      if (cmd) {
        // Dev command detected - raw output will show
        results.steps.push({ name: 'detect_dev', status: 'success', command: cmd, type: 'npm' });
        return { command: cmd, type: 'npm' };
      }
      // No dev script found
      results.steps.push({ name: 'detect_dev', status: 'skipped' });
      return null;

    });

    // ============================================
    // STEP 5: Start Dev Server
    // ============================================
    await step('Start Dev Server', async () => {
      const primaryRepoDir = join(workspace, primaryRepo.name);

      // Check for docker-compose.test.yml
      const composeFile = 'docker-compose.test.yml';
      if (!existsSync(join(primaryRepoDir, composeFile))) {
        console.log(` ⚠️ No ${composeFile} found, skipping server startup`);
        results.steps.push({ name: 'start_server', status: 'skipped' });
        return null;
      }

      // Start services - raw docker output will be captured
      execCommand(['docker', 'compose', '-f', composeFile, 'up', '-d'], primaryRepoDir);

      // Wait for services to be healthy
      await new Promise(resolve => setTimeout(resolve, 10000)); // Wait 10s
      results.steps.push({ name: 'start_server', status: 'success' });
      return true;
    });

    // ============================================
    // STEP 5: Build Prompt and Call AI Agent
    // ============================================
    await step('Run AI Agent Implementation', async () => {
      // Detect project info for all repos
      const repoInfos = repos.map(repo => {
        const repoDir = join(workspace, repo.name);
        return {
          ...repo,
          ...detectProjectInfo(repoDir)
        };
      });

      // Build prompt with ticket details + multi-repo context
      const prompt = buildCursorPrompt(ticket, repoInfos, devInfo);
      // Save prompt to file for reference.
      // BUG FIX: `require('fs')` is undefined in an ES module and threw a
      // ReferenceError here — use the named `fs` import instead.
      const promptPath = join(workspace, '.cursor-prompt.md');
      writeFileSync(promptPath, prompt);

      // Invoke AI agent (strategy auto-selected, runs from parent directory to see all repos)
      await invokeAgent(
        prompt,
        { state: { model: MODEL, workspace } },
        { print: true }
      );
      results.steps.push({ name: 'ai_agent', status: 'success' });
    });

    // ============================================
    // STEP 6: Run E2E Tests with Video
    // ============================================
    const _testResults = await step('Run E2E Tests', async () => {
      const primaryRepoDir = join(workspace, primaryRepo.name);

      // Check if Playwright is configured
      if (!existsSync(join(primaryRepoDir, 'playwright.config.js')) &&
          !existsSync(join(primaryRepoDir, 'playwright.config.ts'))) {
        // No Playwright config found
        results.steps.push({ name: 'e2e_tests', status: 'skipped' });
        return null;
      }

      try {
        // Run Playwright with video recording - raw output will be captured
        execCommand('npx playwright test --reporter=json', primaryRepoDir);
        results.steps.push({ name: 'e2e_tests', status: 'success' });
        return { passed: true };
      } catch (error) {
        // Error output already captured

        // Stop docker-compose
        execCommand('docker compose -f docker-compose.test.yml down', primaryRepoDir, { allowFailure: true });

        throw new Error(`E2E tests failed: ${error.message}`, { cause: error });
      }
    });

    // Stop dev server silently (cleanup, no need to report)
    try {
      execCommand('docker compose -f docker-compose.test.yml down', join(workspace, primaryRepo.name), { allowFailure: true });
    } catch {
      // Ignore cleanup errors
    }

    // ============================================
    // STEP 7: Create Pull Request (Commit + Push + PR)
    // ============================================
    const prUrl = await step('Create Pull Request', async () => {
      const primaryRepoDir = join(workspace, primaryRepo.name);
      const featureBranch = `feature/${TICKET_KEY.toLowerCase()}`;

      // Commit changes - raw git output will be captured
      execCommand(['git', 'add', '.'], primaryRepoDir);
      execCommand(['git', 'commit', '-m', `feat(${TICKET_KEY}): ${ticket.summary}`], primaryRepoDir);

      // Push to remote - raw git output will be captured
      execCommand(['git', 'push', 'origin', featureBranch], primaryRepoDir);

      // TODO: PR creation now handled differently (no HTTP callback)
      console.log(` ⚠️ PR creation via API removed (using SQS flow)`);

      results.steps.push({ name: 'create_pr', status: 'skipped' });
      return null;
    });

    // ============================================
    // STEP 8: Report Success (Upload Videos + Report)
    // ============================================
    await step('Report Results', async () => {
      const primaryRepoDir = join(workspace, primaryRepo.name);
      const videosDir = join(primaryRepoDir, 'test-results');

      // Upload test videos if they exist
      const videoUrls = [];
      if (existsSync(videosDir)) {
        // TODO: Upload videos to S3
      }

      // Report final results
      results.status = 'completed';
      results.prUrl = prUrl;
      results.videoUrls = videoUrls;

      await reportFinalStatus(getProgressState(), {
        status: 'completed',
        artifacts: { prUrl, videoUrls }
      });

      // Results reported
    });

    // Execution completed successfully

    process.exit(0);

  } catch (error) {
    console.error('');
    console.error('╔════════════════════════════════════════════════════════╗');
    console.error('║ ❌ FAILED! ║');
    console.error('╚════════════════════════════════════════════════════════╝');
    console.error('');
    console.error('Error:', error.message);
    console.error('Stack:', error.stack);

    try {
      await reportFinalStatus(getProgressState(), { status: 'failed', error: error.message });
    } catch (reportError) {
      console.error('Failed to report error:', reportError.message);
    }

    process.exit(1);
  }
}
343
+
344
+ // ============================================
345
+ // HELPER FUNCTIONS
346
+ // ============================================
347
+
348
/**
 * Snapshot the environment-derived settings the progress reporter needs.
 *
 * @returns {object} Keys EXECUTION_ID, PROGRESS_API_URL, PROGRESS_QUEUE_URL,
 *   SQS_AUTH_TOKEN and PROJECT_API_TOKEN, each read from process.env
 *   (undefined when the variable is not set).
 */
function getProgressState() {
  const envKeys = [
    'EXECUTION_ID',
    'PROGRESS_API_URL',
    'PROGRESS_QUEUE_URL',
    'SQS_AUTH_TOKEN',
    'PROJECT_API_TOKEN'
  ];

  const state = {};
  for (const key of envKeys) {
    state[key] = process.env[key];
  }
  return state;
}
357
+
358
/**
 * Run one named pipeline step with live progress reporting.
 *
 * While `fn` runs, console.log is temporarily replaced so all output is
 * buffered; every 2s the accumulated buffer is pushed via reportProgress
 * (status 'running'). On completion the final buffer is reported with
 * status 'success' or 'failed' and console.log is restored.
 *
 * NOTE(review): the console.log override is global — nested/concurrent
 * step() calls would stack overrides; callers appear to run steps
 * sequentially, so this seems intentional. Confirm before reusing.
 *
 * @param {string} name - Step name shown in the UI / progress stream.
 * @param {Function} fn - Async step body; its return value is passed through.
 * @returns {Promise<*>} Whatever `fn` resolves to.
 * @throws Rethrows any error from `fn` after reporting 'failed'.
 */
async function step(name, fn) {
  // Step name will be shown in UI, raw logs will flow through

  const startTime = Date.now();
  const logBuffer = [];
  let lastSentLogs = '';

  // Capture console.log output (while still echoing to the real console)
  const originalLog = console.log;
  console.log = (...args) => {
    const message = args.join(' ');
    logBuffer.push(message);
    originalLog(...args);
  };

  const progressState = getProgressState();

  // Periodically flush buffered logs; skipped when nothing new accumulated.
  // Reporting failures are deliberately swallowed — live logs are best-effort.
  const liveLogInterval = setInterval(() => {
    const currentLogs = logBuffer.join('\n');
    if (currentLogs !== lastSentLogs && currentLogs.length > 0) {
      lastSentLogs = currentLogs;
      reportProgress(name, 'running', currentLogs, progressState).catch(() => {});
    }
  }, 2000);

  try {
    // Announce the step before running it so the UI shows it immediately.
    await reportProgress(name, 'running', '', progressState);

    const result = await fn();
    const duration = `${((Date.now() - startTime) / 1000).toFixed(1) }s`;

    // Stop live streaming and restore console.log BEFORE the final report,
    // so the report itself is not captured into the buffer.
    clearInterval(liveLogInterval);
    console.log = originalLog;

    const logs = logBuffer.join('\n');
    await reportProgress(name, 'success', logs || `Completed in ${duration}`, progressState);

    return result;
  } catch (error) {
    // Same cleanup on the failure path, then report and rethrow.
    clearInterval(liveLogInterval);
    console.log = originalLog;

    const logs = logBuffer.join('\n');
    await reportProgress(name, 'failed', `${logs }\n\nError: ${error.message}`, progressState);
    throw error;
  }
}
405
+
406
/**
 * Run a command synchronously and echo its stdout/stderr to console.log
 * (so step() captures the output).
 *
 * BUG FIX: the original did `const { spawnSync } = require('child_process')`
 * inside the function — `require` does not exist in an ES module, so every
 * call threw ReferenceError before the command could run. spawnSync is now
 * imported at the top of the file.
 *
 * @param {string[]|string} commandOrBin - Array form [binary, ...args] runs
 *   without a shell (no injection); string form runs through a shell and is
 *   only for trusted, hardcoded commands.
 * @param {string} cwd - Working directory for the command.
 * @param {{allowFailure?: boolean}} [options] - When allowFailure is true,
 *   non-zero exits and launch errors return null instead of throwing.
 * @returns {string|null} Captured stdout (or stderr if stdout is empty);
 *   null when a failure was tolerated via allowFailure.
 * @throws {Error} When the command cannot be launched or exits non-zero
 *   and allowFailure is not set.
 */
function execCommand(commandOrBin, cwd, options = {}) {
  try {
    let result;

    if (Array.isArray(commandOrBin)) {
      // Safe mode: [binary, ...args] — no shell, no injection
      const [bin, ...args] = commandOrBin;
      result = spawnSync(bin, args, {
        cwd,
        encoding: 'utf-8',
        stdio: ['pipe', 'pipe', 'pipe']
      });
    } else {
      // Legacy string mode — only for trusted, hardcoded commands
      result = spawnSync(commandOrBin, {
        cwd,
        shell: true,
        encoding: 'utf-8',
        stdio: ['pipe', 'pipe', 'pipe']
      });
    }

    // spawnSync reports launch failures (e.g. binary not found) via
    // result.error with status null — surface the real error instead of
    // a misleading "exit code null" message.
    if (result.error) {
      throw result.error;
    }

    if (result.stdout) {
      console.log(result.stdout);
    }
    if (result.stderr) {
      console.log(result.stderr);
    }

    if (result.status !== 0 && !options.allowFailure) {
      const label = Array.isArray(commandOrBin) ? commandOrBin.join(' ') : commandOrBin;
      throw new Error(`Command failed with exit code ${result.status}: ${label}`);
    }

    return result.stdout || result.stderr;
  } catch (error) {
    if (options.allowFailure) {
      return null;
    }
    throw error;
  }
}
449
+
450
/**
 * Detect a repo's framework, language, install and test commands.
 *
 * Resolution order: .zibby.yml override → package.json (Node ecosystems)
 * → Python (requirements.txt / pyproject.toml) → Ruby (Gemfile) →
 * Go (go.mod) → Java (pom.xml) → generic make-based fallback.
 *
 * BUG FIX: the original did `require('js-yaml')` — `require` is undefined
 * in an ES module, so the .zibby.yml override path always threw and fell
 * through to auto-detection. createRequire(import.meta.url) restores the
 * synchronous CJS load of the optional js-yaml dependency.
 *
 * @param {string} repoDir - Absolute path to the repository root.
 * @returns {{name: string, framework: string, language: string,
 *   testCommand: string, installCommand: string, custom?: boolean}}
 */
function detectProjectInfo(repoDir) {
  // Check for .zibby.yml override first
  const zibbyConfigPath = join(repoDir, '.zibby.yml');
  if (existsSync(zibbyConfigPath)) {
    try {
      // ESM has no global `require`; build one to load optional js-yaml.
      const require = createRequire(import.meta.url);
      const yaml = require('js-yaml');
      const config = yaml.load(readFileSync(zibbyConfigPath, 'utf-8'));
      return {
        name: config.name || 'Custom Project',
        framework: config.framework || 'Custom',
        language: config.language || 'Custom',
        testCommand: config.test || 'make test',
        installCommand: config.install || 'make install',
        custom: true
      };
    } catch (_error) {
      // Bad YAML or js-yaml missing — deliberate best-effort fallback.
      console.warn('Invalid .zibby.yml, falling back to auto-detection');
    }
  }

  // Node.js/JavaScript/TypeScript
  const pkgPath = join(repoDir, 'package.json');
  if (existsSync(pkgPath)) {
    const pkg = JSON.parse(readFileSync(pkgPath, 'utf-8'));
    const deps = { ...pkg.dependencies, ...pkg.devDependencies };

    // Order matters: more specific frameworks are checked first.
    let framework = 'Node.js';
    if (deps['next']) framework = 'Next.js';
    else if (deps['react-scripts']) framework = 'Create React App';
    else if (deps['vite'] && deps['react']) framework = 'React + Vite';
    else if (deps['@angular/core']) framework = 'Angular';
    else if (deps['vue']) framework = 'Vue.js';
    else if (deps['express']) framework = 'Express.js';

    return {
      name: pkg.name || 'Unknown Project',
      framework,
      language: 'JavaScript/TypeScript',
      testCommand: pkg.scripts?.test || 'npm test',
      installCommand: 'npm install'
    };
  }

  // Python
  if (existsSync(join(repoDir, 'requirements.txt')) || existsSync(join(repoDir, 'pyproject.toml'))) {
    const framework = existsSync(join(repoDir, 'manage.py')) ? 'Django' :
      existsSync(join(repoDir, 'app.py')) ? 'Flask' : 'Python';
    return {
      name: 'Python Project',
      framework,
      language: 'Python',
      testCommand: 'pytest',
      installCommand: 'pip install -r requirements.txt'
    };
  }

  // Ruby
  if (existsSync(join(repoDir, 'Gemfile'))) {
    return {
      name: 'Ruby Project',
      framework: 'Rails',
      language: 'Ruby',
      testCommand: 'bundle exec rspec',
      installCommand: 'bundle install'
    };
  }

  // Go
  if (existsSync(join(repoDir, 'go.mod'))) {
    return {
      name: 'Go Project',
      framework: 'Go',
      language: 'Go',
      testCommand: 'go test ./...',
      installCommand: 'go mod download'
    };
  }

  // Java/Spring
  if (existsSync(join(repoDir, 'pom.xml'))) {
    return {
      name: 'Java Project',
      framework: 'Spring Boot',
      language: 'Java',
      testCommand: './mvnw test',
      installCommand: './mvnw install'
    };
  }

  // Fallback for unrecognized project layouts
  return {
    name: 'Unknown Project',
    framework: 'Unknown',
    language: 'Unknown',
    testCommand: 'make test',
    installCommand: 'make install'
  };
}
548
+
549
/**
 * Build the implementation prompt for the AI agent.
 *
 * Loads prompts/implement-ticket.md (relative to this module) and falls back
 * to an inline template when it is missing. Replaces {{...}} placeholders
 * with ticket data and a single- or multi-repo project context, then
 * resolves the {{#if ADDITIONAL_CONTEXT}}...{{/if}} conditional block.
 *
 * @param {object} ticket - Ticket data; reads ticketKey/key, summary,
 *   description, acceptanceCriteria, additionalContext.
 * @param {Array<object>} repoInfos - Repos merged with detectProjectInfo()
 *   results (name, framework, testCommand, isPrimary, ...).
 * @param {?{command: string, type: string, configFile?: string}} devInfo -
 *   Detected dev command, or null when none was found.
 * @returns {string} The fully substituted prompt text.
 */
function buildCursorPrompt(ticket, repoInfos, devInfo) {
  // Read the prompt template
  const templatePath = join(__dirname, '../../prompts/implement-ticket.md');
  let template;

  try {
    template = readFileSync(templatePath, 'utf-8');
  } catch (_error) {
    // Fallback if template not found — must use the same placeholder names
    // as the on-disk template so the replacements below apply to both.
    template = `
# Implement Ticket: {{TICKET_KEY}}

## Project Context
{{PROJECT_CONTEXT}}

## Ticket Summary
{{TICKET_SUMMARY}}

## Description
{{TICKET_DESCRIPTION}}

## Acceptance Criteria
{{ACCEPTANCE_CRITERIA}}

{{#if ADDITIONAL_CONTEXT}}
## Additional Context from User
{{ADDITIONAL_CONTEXT}}

{{/if}}
---

You are implementing this ticket. Follow these steps:

1. Read the existing codebase and understand the patterns
2. Implement the feature as described
3. Write tests to verify the functionality
4. Ensure all tests pass
5. Fix any linter errors

Now implement this ticket completely!
`.trim();
  }

  // Build project context for multi-repo
  const primaryRepo = repoInfos.find(r => r.isPrimary) || repoInfos[0];

  // Build dev command string with context
  let devCommandStr;
  if (devInfo?.type === 'docker-compose') {
    devCommandStr = `\`docker-compose up\` (using ${devInfo.configFile})`;
  } else if (devInfo?.command) {
    devCommandStr = `\`cd ${primaryRepo.name} && ${devInfo.command}\``;
  } else {
    // No dev command detected — give the agent a reasonable default hint.
    devCommandStr = '`npm run dev` (or check package.json scripts)';
  }

  let projectContext;

  if (repoInfos.length === 1) {
    // Single repo
    projectContext = `
You are working in **${primaryRepo.name}**, a ${primaryRepo.framework} project.

**Commands:**
- Dev server: ${devCommandStr}
- Run tests: \`cd ${primaryRepo.name} && ${primaryRepo.testCommand}\`

You have full access to the codebase in the current directory.
`.trim();
  } else {
    // Multi-repo setup
    const repoList = repoInfos.map(r =>
      `- **${r.name}/** (${r.framework})${r.isPrimary ? ' ← **MAKE CHANGES HERE**' : ' (reference only)'}`
    ).join('\n');

    projectContext = `
You are working in a **multi-repository** setup with ${repoInfos.length} repositories:

${repoList}

**Primary Repository:** ${primaryRepo.name}
- This is where you should implement the feature
- Framework: ${primaryRepo.framework}
- Dev server: ${devCommandStr}
- Run tests: \`cd ${primaryRepo.name} && ${primaryRepo.testCommand}\`

**Other Repositories:**
${repoInfos.filter(r => !r.isPrimary).map(r =>
  `- **${r.name}**: You can read code from here for reference (shared libraries, services, etc.)`
).join('\n') || '(none)'}

**Important:** Make all code changes in the \`${primaryRepo.name}/\` directory only.
`.trim();
  }

  // Replace placeholders with actual ticket data
  let prompt = template
    .replace(/\{\{TICKET_KEY\}\}/g, ticket.ticketKey || ticket.key || 'UNKNOWN')
    .replace(/\{\{PROJECT_CONTEXT\}\}/g, projectContext)
    .replace(/\{\{TICKET_SUMMARY\}\}/g, ticket.summary || 'No summary')
    .replace(/\{\{TICKET_DESCRIPTION\}\}/g, ticket.description || 'No description provided')
    .replace(/\{\{ACCEPTANCE_CRITERIA\}\}/g, ticket.acceptanceCriteria || 'Not specified');

  // Handle additionalContext - replace conditional block
  if (ticket.additionalContext) {
    const additionalContextSection = `## Additional Context from User\n${ticket.additionalContext}\n\n`;
    // Replace the entire {{#if ADDITIONAL_CONTEXT}}...{{/if}} block
    prompt = prompt.replace(/\{\{#if ADDITIONAL_CONTEXT\}\}[\s\S]*?\{\{\/if\}\}/g, additionalContextSection);
  } else {
    // Remove the conditional block if no additional context
    prompt = prompt.replace(/\{\{#if ADDITIONAL_CONTEXT\}\}[\s\S]*?\{\{\/if\}\}/g, '');
  }

  return prompt;
}
664
+