@testcollab/cli 1.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,753 @@
1
+ /**
2
+ * featuresync.js
3
+ *
4
+ * Git-based synchronization command for Gherkin feature files with TestCollab.
5
+ *
6
+ * This command follows the workflow described in gherkin-docs/bdd-integration/README.md:
7
+ * 1. Fetch last synced commit from server
8
+ * 2. Run git diff to find changes
9
+ * 3. Calculate hashes for old and new file versions
10
+ * 4. Resolve IDs for existing items
11
+ * 5. Build and send GherkinSyncDelta payload
12
+ */
13
+
14
+ import { simpleGit } from 'simple-git';
15
+ import * as gherkin from '@cucumber/gherkin';
16
+ import * as messages from '@cucumber/messages';
17
+ import { createHash } from 'crypto';
18
+ import path from 'path';
19
+ // fs - file
20
+ import fs from 'fs';
21
+
22
// Enable extra debug logs by setting BDD_SYNC_DEBUG=1.
// When on, resolveIds and buildSyncPayload print hash→ID mapping traces.
const DEBUG_BDD_SYNC = process.env.BDD_SYNC_DEBUG === '1';
24
+
25
/**
 * Main featuresync command handler.
 *
 * Workflow (see gherkin-docs/bdd-integration/README.md):
 *   1. fetch the last synced commit from the server
 *   2. git-diff it against HEAD to find changed .feature files
 *   3. hash old/new versions of each changed file
 *   4. resolve old hashes to existing TestCollab suite/case IDs
 *   5. build and POST the GherkinSyncDelta payload
 *
 * Exits the process with code 1 on any error (missing token, not a Git
 * repository, or a failure in any sync step).
 *
 * @param {Object} options - Command options from commander
 * @param {string} [options.apiKey] - API token; falls back to the
 *   TESTCOLLAB_TOKEN environment variable when omitted
 * @param {string|number} options.project - TestCollab project ID
 * @param {string} options.apiUrl - Base URL of the TestCollab API
 */
export async function featuresync(options) {
  try {
    // Resolve API key: --api-key flag takes precedence, then TESTCOLLAB_TOKEN env var
    const token = options.apiKey || process.env.TESTCOLLAB_TOKEN;
    if (!token) {
      console.error('āŒ Error: No API key provided');
      console.error(' Pass --api-key <key> or set the TESTCOLLAB_TOKEN environment variable.');
      process.exit(1);
    }

    // Initialize Git client rooted at the current working directory
    const git = simpleGit();

    // Abort early when not run inside a Git repository
    const isRepo = await git.checkIsRepo();
    if (!isRepo) {
      console.error('āŒ Error: Not in a Git repository');
      console.error(' Please run this command from within a Git repository.');
      process.exit(1);
    }

    // Warn (but do not abort) about uncommitted .feature changes —
    // only committed content is synced.
    await checkUncommittedChanges(git);

    console.log('šŸ” Fetching sync state from TestCollab...');

    // Step 1: Fetch last synced commit from server
    const lastSyncedCommit = await fetchSyncState(options.project, options.apiUrl, token);
    console.log(`šŸ“Š Last synced commit: ${lastSyncedCommit || 'none (initial sync)'}`);

    // Step 2: Get current HEAD commit
    const headCommit = await git.revparse(['HEAD']);
    console.log(`šŸ“Š Current HEAD commit: ${headCommit}`);

    if (lastSyncedCommit === headCommit) {
      console.log('āœ… Already up to date - no sync needed');
      return;
    }

    // Step 3: Run git diff to find changes
    console.log('šŸ” Analyzing changes...');
    let changes;

    if (lastSyncedCommit) {
      // Regular sync - compare with last synced commit.
      // --find-renames makes git report Rxx statuses instead of delete+add pairs.
      const diffOptions = ['--name-status', '--find-renames', `${lastSyncedCommit}..HEAD`];

      const diffResult = await git.diff(diffOptions);
      console.log(diffResult);
      changes = parseDiffOutput(diffResult);
    } else {
      // Initial sync - treat every .feature file in HEAD as an addition
      const allFiles = await git.raw(['ls-tree', '-r', '--name-only', 'HEAD']);
      const featureFiles = allFiles.split('\n')
        .filter(file => file.trim().endsWith('.feature'))
        .map(file => ({
          status: 'A',
          oldPath: null,
          newPath: file.trim()
        }));
      changes = featureFiles;
    }

    console.log(`šŸ“„ Found ${changes.length} change(s)`);
    if (changes.length > 0) {
      console.log(' Details:');
      changes.forEach((c, i) => {
        const left = c.oldPath ? c.oldPath : '';
        const right = c.newPath ? c.newPath : '';
        const arrow = c.oldPath && c.newPath ? ' -> ' : '';
        console.log(` ${String(i + 1).padStart(2, ' ')}. ${c.status} ${left}${arrow}${right}`);
      });
    }
    if (changes.length === 0) {
      console.log('āœ… No changes to sync');
      return;
    }

    // Step 4: Process each change and calculate hashes
    console.log('šŸ”§ Processing changes and calculating hashes...');
    const processedChanges = [];
    const oldHashes = { features: [], scenarios: [] };

    for (const change of changes) {
      // processChange returns null when a file cannot be read or parsed;
      // such changes are skipped (it already printed a warning).
      const processed = await processChange(git, change, lastSyncedCommit);
      if (processed) {
        processedChanges.push(processed);

        // Collect old hashes so resolve-ids can map them to suite/case IDs
        if (processed.oldFeatureHash) {
          oldHashes.features.push(processed.oldFeatureHash);
        }
        if (processed.oldScenarioHashes) {
          oldHashes.scenarios.push(...processed.oldScenarioHashes);
        }
      }
    }

    // Step 5: Resolve IDs for existing items
    console.log('šŸ” Resolving existing item IDs...');
    if (DEBUG_BDD_SYNC) {
      console.log(` ā†Ŗļø Requesting ID resolution for:`);
      console.log(` • feature hashes: ${oldHashes.features.length}`);
      console.log(` • scenario hashes: ${oldHashes.scenarios.length}`);
    }
    const resolvedIds = await resolveIds(options.project, oldHashes, options.apiUrl, token);
    if (DEBUG_BDD_SYNC) {
      const suiteKeys = Object.keys(resolvedIds.suites || {});
      const caseKeys = Object.keys(resolvedIds.cases || {});
      console.log(` āœ… Resolved IDs:`);
      console.log(` • suites mapped: ${suiteKeys.length}`);
      console.log(` • cases mapped: ${caseKeys.length}`);
      if (suiteKeys.length > 0) {
        const sample = suiteKeys.slice(0, 5).map(k => ({ hash: k, suiteId: resolvedIds.suites[k]?.suiteId }));
        console.log(` • sample suites:`, sample);
      }
      if (caseKeys.length > 0) {
        const sample = caseKeys.slice(0, 5).map(k => ({ hash: k, caseId: resolvedIds.cases[k]?.caseId }));
        console.log(` • sample cases:`, sample);
      }
    }

    // Step 6: Build final payload
    console.log('šŸ“¦ Building sync payload...');
    const payload = buildSyncPayload(
      options.project,
      lastSyncedCommit,
      headCommit,
      processedChanges,
      resolvedIds
    );
    //console.log({payload});
    // log payload in file
    //const payloadFilePath = path.join(process.cwd(), 'sync-payload.json');
    //fs.writeFileSync(payloadFilePath, JSON.stringify(payload, null, 2));
    //console.log(`šŸ“‚ Payload written to ${payloadFilePath}`);

    // Step 7: Send to TestCollab
    console.log('šŸš€ Syncing with TestCollab...');
    const result = await syncWithTestCollab(payload, options.apiUrl, token);

    // Display results
    displaySyncResults(result);
    console.log('āœ… Synchronization completed successfully');

  } catch (error) {
    console.error(`āŒ Error: ${error.message}`);
    process.exit(1);
  }
}
179
+
180
/**
 * Fetch the last synced commit SHA for a project from TestCollab.
 *
 * @param {string|number} projectId - TestCollab project ID
 * @param {string} apiUrl - Base URL of the TestCollab API
 * @param {string} token - API token (sent as a query parameter)
 * @returns {Promise<string|undefined>} last synced commit SHA, or a falsy
 *   value when the project has never been synced
 * @throws {Error} when the request cannot be made or returns a non-2xx status
 */
async function fetchSyncState(projectId, apiUrl, token) {
  // Build the URL with proper encoding instead of string concatenation.
  const url = new URL(`${apiUrl}/bdd/sync`);
  url.searchParams.set('project', projectId);
  url.searchParams.set('token', token);

  // Log a redacted URL: the token is a secret and must not end up in logs.
  const redacted = new URL(url);
  redacted.searchParams.set('token', '***');
  console.log(`Fetching sync state from: ${redacted}`);

  let response;
  try {
    response = await fetch(url, {
      method: 'GET',
      headers: {
        'Content-Type': 'application/json'
      }
    });
  } catch (error) {
    // Network-level failure (DNS, refused connection, ...).
    throw new Error(`Failed to connect to TestCollab API: ${error.message}`);
  }

  // HTTP-status errors are reported as-is instead of being re-wrapped as
  // connection failures (the previous version double-wrapped them).
  if (!response.ok) {
    throw new Error(`Failed to fetch sync state: ${response.status} ${response.statusText}! Check if project ID and API URL are correct.`);
  }

  const data = await response.json();
  return data.lastSyncedCommit;
}
205
+
206
/**
 * Parse `git diff --name-status` output into structured change records.
 *
 * Each returned record has { status, oldPath, newPath } where:
 *   - A (add):     newPath only
 *   - M (modify):  newPath only
 *   - D (delete):  oldPath only
 *   - Rxx (rename): both paths
 *   - Cxx (copy):  newPath only (the source file is untouched)
 * Only entries touching .feature files are kept.
 *
 * @param {string} diffOutput - raw `--name-status` text from git
 * @returns {Array<{status: string, oldPath: string|null, newPath: string|null}>}
 */
function parseDiffOutput(diffOutput) {
  if (!diffOutput.trim()) {
    return [];
  }

  const changes = [];

  for (const line of diffOutput.trim().split('\n')) {
    // `--name-status` lines look like:
    //   A<TAB>path
    //   M<TAB>path
    //   D<TAB>path
    //   R<score><TAB>old<TAB>new
    //   C<score><TAB>old<TAB>new
    const match = line.match(/^([AMDRC]\d*)\s+(.+?)(?:\s+(.+))?$/);
    if (!match) {
      continue;
    }
    const [, status, path1, path2] = match;

    const change = {
      status,
      // Deletes/renames reference the pre-change path; copies leave their source untouched.
      oldPath: status.startsWith('D') || status.startsWith('R') ? path1 : null,
      // BUG FIX: copies (Cxx) create a new file at the destination path and were
      // previously dropped (both paths ended up null).
      newPath: status.startsWith('A') || status.startsWith('M') || status.startsWith('R') || status.startsWith('C')
        ? (path2 || path1)
        : null
    };

    // Only include .feature files
    if ((change.oldPath && change.oldPath.endsWith('.feature')) ||
        (change.newPath && change.newPath.endsWith('.feature'))) {
      changes.push(change);
    }
  }

  return changes;
}
238
+
239
/**
 * Process a single change from git diff: parse the old and new revisions of
 * the touched .feature file and attach their hashes.
 *
 * @param {import('simple-git').SimpleGit} git - git client
 * @param {{status: string, oldPath: string|null, newPath: string|null}} change
 * @param {string|null|undefined} lastSyncedCommit - baseline commit, if any
 * @returns {Promise<Object|null>} enriched change record, or null when the
 *   file could not be read/parsed (a warning is printed)
 */
async function processChange(git, change, lastSyncedCommit) {
  const result = {
    status: change.status,
    oldPath: change.oldPath,
    newPath: change.newPath
  };

  try {
    // Previous revision: needed for M/D/R so old hashes can later be
    // resolved to existing suite/case IDs. Additions have no old version.
    const previousPath = change.oldPath || change.newPath;
    if (lastSyncedCommit && change.status !== 'A' && previousPath) {
      const oldContent = await git.show([`${lastSyncedCommit}:${previousPath}`]);
      const oldParsed = parseGherkinFile(oldContent, previousPath);
      if (oldParsed) {
        result.oldFeatureHash = oldParsed.featureHash;
        result.oldScenarioHashes = oldParsed.scenarios.map((s) => s.hash);
        // Keep titles alongside hashes so buildSyncPayload can map by title.
        result.oldScenarios = oldParsed.scenarios;
      }
    }

    // Current revision: present for A, M and R changes.
    if (change.newPath) {
      const newContent = await git.show([`HEAD:${change.newPath}`]);
      const parsed = parseGherkinFile(newContent, change.newPath);
      if (parsed) {
        const { featureHash, feature, scenarios } = parsed;
        result.feature = {
          hash: featureHash,
          title: feature.name,
          description: feature.FeatureDescription,
          background: feature.background,
          backgroundText: feature.backgroundText
        };
        result.scenarios = scenarios;
      }
    }

    return result;
  } catch (error) {
    console.warn(`āš ļø Warning: Could not process ${change.oldPath || change.newPath}: ${error.message}`);
    return null;
  }
}
286
+
287
/**
 * Extract the free-form feature description: the text between the `Feature:`
 * line and the first structural element (Background, Scenario, Scenario
 * Outline, Rule, or a tag line).
 *
 * @param {string} content - raw .feature file text
 * @returns {string} description lines joined by '\n' (empty when none)
 */
function extractFeatureDescription(content) {
  const lines = content.split('\n');
  let description = '';
  let inDescription = false;

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();

    if (line.startsWith('Feature:')) {
      inDescription = true;
      continue;
    }

    if (inDescription) {
      // BUG FIX: the description also ends at `Scenario Outline:`, `Rule:`
      // and tag lines (`@...`) — previously those leaked into the description.
      if (
        line.startsWith('Background:') ||
        line.startsWith('Scenario:') ||
        line.startsWith('Scenario Outline:') ||
        line.startsWith('Rule:') ||
        line.startsWith('@')
      ) {
        break;
      }

      // Skip blank lines and Gherkin comments.
      if (line && !line.startsWith('#')) {
        if (description) description += '\n';
        description += line;
      }
    }
  }

  return description.trim();
}
317
+
318
/**
 * Extract textual content inside a `Background:` block (including non-step
 * lines), stopping at the next structural keyword or tag line.
 *
 * @param {string} content - raw .feature file text
 * @returns {string[]} trimmed, non-empty, non-comment lines of the background
 */
function extractBackgroundText(content) {
  const lines = content.split('\n');
  let inBackground = false;
  const backgroundLines = [];

  for (let i = 0; i < lines.length; i++) {
    const line = lines[i].trim();

    if (line.startsWith('Background:')) {
      inBackground = true;
      continue;
    }

    if (inBackground) {
      // BUG FIX: also stop at tag lines (`@...`) — they belong to the next
      // scenario and previously leaked into the background text.
      if (
        line.startsWith('Scenario:') ||
        line.startsWith('Scenario Outline:') ||
        line.startsWith('Rule:') ||
        line.startsWith('Feature:') ||
        line.startsWith('@')
      ) {
        break;
      }

      // Skip blank lines and Gherkin comments.
      if (line && !line.startsWith('#')) {
        backgroundLines.push(line);
      }
    }
  }

  return backgroundLines;
}
347
+
348
/**
 * Parse a Gherkin file and extract structured data: feature metadata,
 * per-scenario step lists/hashes, and a whole-feature hash.
 *
 * @param {string} content - raw .feature file text
 * @param {string} filePath - repo-relative path (mixed into every hash so
 *   renames produce new hashes)
 * @returns {{feature: Object, featureHash: string, scenarios: Array}|null}
 *   null when the document has no feature
 * @throws {Error} when the Gherkin source cannot be parsed
 */
function parseGherkinFile(content, filePath) {
  try {
    // @cucumber/gherkin v33 API: Parser + AstBuilder + classic token matcher.
    const uuidFn = messages.IdGenerator.uuid();
    const builder = new gherkin.AstBuilder(uuidFn);
    const matcher = new gherkin.GherkinClassicTokenMatcher();
    const parser = new gherkin.Parser(builder, matcher);

    const gherkinDocument = parser.parse(content);

    if (!gherkinDocument || !gherkinDocument.feature) {
      return null;
    }

    const feature = gherkinDocument.feature;
    const scenarios = [];
    let background = null;

    // Free-text pieces are taken from the raw source, not the AST.
    const featureDescription = extractFeatureDescription(content);
    const backgroundText = extractBackgroundText(content);

    // Scenarios and the background both live in feature.children.
    for (const child of feature.children || []) {
      if (child.scenario) {
        const scenario = child.scenario;
        const steps = scenario.steps || [];
        const normalizedSteps = steps.map(step => `${step.keyword}${step.text}`);
        const stepsText = normalizedSteps.join('\n');
        // Tags are stored without the leading '@'.
        const scenarioTags = (scenario.tags || [])
          .map(tag => (tag.name || '').trim())
          .filter(Boolean)
          .map(tagName => (tagName.startsWith('@') ? tagName.slice(1) : tagName));

        scenarios.push({
          hash: calculateHash(stepsText, filePath),
          title: scenario.name,
          steps: normalizedSteps,
          tags: scenarioTags
        });
      } else if (child.background) {
        background = child.background;
      }
    }

    // Feature hash = description + background steps + every scenario's steps.
    let featureContent = '';
    if (featureDescription) {
      featureContent += featureDescription + '\n';
    }
    if (background) {
      const bgSteps = background.steps || [];
      featureContent += bgSteps.map(step => `${step.keyword}${step.text}`).join('\n');
    }
    // BUG FIX: scenario.steps entries are already strings, so the previous
    // `${step.keyword}${step.text}` mapping produced "undefinedundefined" per
    // step and the feature hash ignored scenario step content.
    // NOTE(review): this changes computed feature hashes; server-side hashes
    // stored by earlier CLI versions will not match on the next sync — confirm
    // the migration path.
    featureContent += scenarios.map(s => s.steps.join('\n')).join('\n');

    return {
      feature: {
        name: feature.name,
        FeatureDescription: featureDescription || '',
        background: background ? (background.steps || []).map(step => `${step.keyword}${step.text}`) : undefined,
        backgroundText: backgroundText && backgroundText.length > 0 ? backgroundText : undefined
      },
      featureHash: calculateHash(featureContent, filePath),
      scenarios
    };
  } catch (error) {
    throw new Error(`Failed to parse Gherkin file: ${error.message}`);
  }
}
423
+
424
/**
 * Compute a SHA-1 hex digest over "<filePath>:<content>".
 * The path is mixed into the input so a renamed file yields different hashes
 * even when its content is unchanged.
 *
 * @param {string} content - text to hash
 * @param {string} filePath - repo-relative path of the file
 * @returns {string} 40-character lowercase hex digest
 */
function calculateHash(content, filePath) {
  return createHash('sha1')
    .update(`${filePath}:${content}`, 'utf8')
    .digest('hex');
}
432
+
433
/**
 * Resolve old feature/scenario hashes to existing TestCollab suite/case IDs.
 *
 * @param {string|number} projectId - TestCollab project ID
 * @param {{features: string[], scenarios: string[]}} hashes - old hashes
 * @param {string} apiUrl - Base URL of the TestCollab API
 * @param {string} token - API token (sent as a query parameter)
 * @returns {Promise<{suites: Object, cases: Object}>} hash→ID maps
 * @throws {Error} on network failure or non-2xx response
 */
async function resolveIds(projectId, hashes, apiUrl, token) {
  const { features, scenarios } = hashes;

  // Nothing to resolve — skip the round-trip entirely.
  if (features.length === 0 && scenarios.length === 0) {
    return { suites: {}, cases: {} };
  }

  // Only include non-empty hash lists in the request body.
  const payload = { projectId };
  if (features.length > 0) {
    payload.features = features;
  }
  if (scenarios.length > 0) {
    payload.scenarios = scenarios;
  }

  try {
    const response = await fetch(`${apiUrl}/bdd/resolve-ids?token=${token}`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json',
        //'Authorization': `Bearer ${token}`
      },
      body: JSON.stringify(payload)
    });

    if (!response.ok) {
      throw new Error(`Failed to resolve IDs: ${response.status} ${response.statusText}`);
    }

    const responseData = await response.json();

    // The server nests the maps under `results`.
    const results = responseData.results || {};
    return {
      suites: results.suites || {},
      cases: results.cases || {}
    };
  } catch (error) {
    throw new Error(`Failed to resolve IDs: ${error.message}`);
  }
}
475
+
476
/**
 * Build the final GherkinSyncDelta payload from processed changes.
 *
 * For each change, maps new scenarios back to their previous hashes
 * (steps-hash equality, then title match, then positional fallback),
 * attaches resolved suite/case IDs, and marks vanished scenarios deleted.
 *
 * @param {string|number} projectId - project ID (coerced to integer)
 * @param {string|null|undefined} prevCommit - last synced commit SHA
 * @param {string} headCommit - current HEAD commit SHA
 * @param {Array<Object>} changes - records produced by processChange
 * @param {{suites: Object, cases: Object}} resolvedIds - hash→ID maps
 * @returns {Object} payload ready to POST to /bdd/sync
 */
function buildSyncPayload(projectId, prevCommit, headCommit, changes, resolvedIds) {
  const payload = {
    // Always pass a radix — parseInt without one is a footgun.
    projectId: parseInt(projectId, 10),
    prevCommit,
    headCommit,
    changes: []
  };

  for (const change of changes) {
    const payloadChange = {
      status: change.status,
      oldPath: change.oldPath,
      newPath: change.newPath
    };
    if (DEBUG_BDD_SYNC) {
      console.log(`\n🧱 Change: ${change.status} ${change.oldPath || ''} -> ${change.newPath || ''}`);
      if (change.oldFeatureHash) {
        console.log(` • oldFeatureHash: ${change.oldFeatureHash}`);
      }
    }

    if (change.feature) {
      // BUG FIX: copy instead of aliasing so prevHash/suiteId are not
      // written back into the caller's change object.
      payloadChange.feature = { ...change.feature };

      // Include prevHash for any non-add change (Rxx and M) so API can update suite hash
      if (change.oldFeatureHash && change.status !== 'A') {
        payloadChange.feature.prevHash = change.oldFeatureHash;
      }

      // For renames or modifications, include the suiteId if we have it
      if (change.oldFeatureHash) {
        const suiteInfo = resolvedIds.suites[change.oldFeatureHash];
        if (suiteInfo && suiteInfo.suiteId) {
          payloadChange.feature.suiteId = suiteInfo.suiteId;
          if (DEBUG_BDD_SYNC) {
            console.log(` • suite mapping: ${change.oldFeatureHash} -> suiteId ${suiteInfo.suiteId}`);
          }
        } else if (DEBUG_BDD_SYNC) {
          console.log(` • suite mapping: ${change.oldFeatureHash} -> NOT FOUND`);
        }
      }
    }

    if (change.scenarios) {
      // Helper structures for robust old→new scenario mapping.
      const oldHashesSet = new Set(change.oldScenarioHashes || []);
      const oldTitleToHash = new Map((change.oldScenarios || []).map(s => [s.title, s.hash]));
      const sameLengthAsOld = !!change.oldScenarioHashes && change.oldScenarioHashes.length === change.scenarios.length;

      payloadChange.scenarios = change.scenarios.map((scenario, index) => {
        const payloadScenario = {
          hash: scenario.hash,
          title: scenario.title
        };

        if (scenario.tags && scenario.tags.length > 0) {
          payloadScenario.tags = scenario.tags;
        }

        // prevHash resolution, strongest signal first:
        // 1) unchanged steps → new hash equals an old hash
        if (oldHashesSet.has(scenario.hash)) {
          payloadScenario.prevHash = scenario.hash;
          if (DEBUG_BDD_SYNC) {
            console.log(` Ā· mapping by steps-hash equality`);
          }
        } else if (oldTitleToHash.has(scenario.title)) {
          // 2) unchanged title → old hash looked up by title
          payloadScenario.prevHash = oldTitleToHash.get(scenario.title);
          if (DEBUG_BDD_SYNC) {
            console.log(` Ā· mapping by title match`);
          }
        } else if (sameLengthAsOld && change.oldScenarioHashes && change.oldScenarioHashes[index]) {
          // 3) positional fallback, only when counts are equal
          payloadScenario.prevHash = change.oldScenarioHashes[index];
          if (DEBUG_BDD_SYNC) {
            console.log(` Ā· mapping by index fallback`);
          }
        }

        // Attach caseId when the prevHash maps to an existing test case.
        if (payloadScenario.prevHash) {
          const caseInfo = resolvedIds.cases[payloadScenario.prevHash];
          if (caseInfo && caseInfo.caseId) {
            payloadScenario.caseId = caseInfo.caseId;
          }
        }

        // R100 = pure rename with no content change → omit steps so the
        // server does not rewrite unchanged cases. Any other status
        // (A, M, R97, ...) includes them.
        const shouldIncludeSteps = change.status !== 'R100';

        if (shouldIncludeSteps) {
          payloadScenario.steps = scenario.steps;
        }

        if (DEBUG_BDD_SYNC) {
          console.log(` • scenario[${index}] title="${scenario.title}"`);
          console.log(` - prevHash: ${payloadScenario.prevHash || 'none'}`);
          console.log(` - caseId: ${payloadScenario.caseId || 'none'}`);
          console.log(` - newHash: ${payloadScenario.hash}`);
          console.log(` - stepsIncluded: ${shouldIncludeSteps}`);
        }

        return payloadScenario;
      });
      if (DEBUG_BDD_SYNC) {
        const count = payloadChange.scenarios.length;
        console.log(` • scenarios prepared: ${count}`);
      }
    }

    // Mark scenarios that existed before but are absent now as deleted.
    if (change.oldScenarioHashes && change.oldScenarioHashes.length > 0 && change.status !== 'A') {
      const existingScenarios = payloadChange.scenarios || [];
      const newHashes = new Set(existingScenarios.map(s => s.hash).filter(Boolean));
      const newPrevHashes = new Set(existingScenarios.map(s => s.prevHash).filter(Boolean));
      for (const oldHash of change.oldScenarioHashes) {
        if (!newHashes.has(oldHash) && !newPrevHashes.has(oldHash)) {
          existingScenarios.push({ prevHash: oldHash, deleted: true });
          if (DEBUG_BDD_SYNC) {
            console.log(` • scenario deleted: prevHash ${oldHash}`);
          }
        }
      }
      if (existingScenarios.length > 0) {
        payloadChange.scenarios = existingScenarios;
        if (DEBUG_BDD_SYNC) {
          const deletedCount = existingScenarios.filter(s => s.deleted).length;
          console.log(` • scenarios after deletion mark: ${existingScenarios.length} (deleted: ${deletedCount})`);
        }
      }
    }

    payload.changes.push(payloadChange);
  }

  if (DEBUG_BDD_SYNC) {
    console.log(`\nšŸ“¦ Payload summary:`);
    console.log(` • projectId: ${payload.projectId}`);
    console.log(` • prevCommit: ${payload.prevCommit}`);
    console.log(` • headCommit: ${payload.headCommit}`);
    console.log(` • changes: ${payload.changes.length}`);
  }

  return payload;
}
629
+
630
/**
 * POST the sync payload to TestCollab and return the server's result object.
 *
 * @param {Object} payload - GherkinSyncDelta payload from buildSyncPayload
 * @param {string} apiUrl - Base URL of the TestCollab API
 * @param {string} token - API token (sent as a query parameter)
 * @returns {Promise<Object>} parsed server response
 * @throws {Error} "Sync failed: ..." wrapping the server/network error message
 */
async function syncWithTestCollab(payload, apiUrl, token) {
  try {
    const response = await fetch(`${apiUrl}/bdd/sync?token=${token}`, {
      method: 'POST',
      headers: {
        'Content-Type': 'application/json'
      },
      body: JSON.stringify(payload)
    });

    if (!response.ok) {
      let errorMessage = `API request failed (${response.status})`;
      // BUG FIX: read the body exactly once. The previous version called
      // response.text() after response.json() had already consumed the
      // stream, which threw "Body is unusable" and masked the real error.
      const errorText = await response.text();
      try {
        const errorData = JSON.parse(errorText);
        if (errorData.message) {
          errorMessage = errorData.message;
        }
      } catch {
        if (errorText) {
          errorMessage = errorText;
        }
      }
      throw new Error(errorMessage);
    }

    return await response.json();
  } catch (error) {
    throw new Error(`Sync failed: ${error.message}`);
  }
}
664
+
665
/**
 * Print a human-readable summary of the server's sync result: one line per
 * non-zero counter, any warnings, and an "already in sync" note when nothing
 * changed.
 *
 * @param {Object} result - server response from /bdd/sync (counters may be missing)
 */
function displaySyncResults(result) {
  console.log('\nšŸ“Š Synchronization Results:');

  // Counter key → message formatter, printed in this fixed order.
  const counters = [
    ['createdSuites', (n) => `✨ Created ${n} suite(s)`],
    ['createdCases', (n) => `✨ Created ${n} test case(s)`],
    ['renamedSuites', (n) => `šŸ”„ Renamed ${n} suite(s)`],
    ['renamedCases', (n) => `šŸ”„ Renamed ${n} test case(s)`],
    ['updatedCases', (n) => `šŸ”„ Updated ${n} test case(s)`],
    ['deletedSuites', (n) => `šŸ—‘ļø Deleted ${n} suite(s)`],
    ['deletedCases', (n) => `šŸ—‘ļø Deleted ${n} test case(s)`]
  ];

  let totalChanges = 0;
  for (const [key, format] of counters) {
    const count = result[key];
    if (count > 0) {
      console.log(format(count));
    }
    totalChanges += count || 0;
  }

  if (result.warnings && result.warnings.length > 0) {
    console.log('\nāš ļø Warnings:');
    result.warnings.forEach(warning => console.log(` ${warning}`));
  }

  // Nothing was created/renamed/updated/deleted.
  if (totalChanges === 0) {
    console.log('ā„¹ļø No changes were required - everything is already in sync');
  }
}
708
+
709
/**
 * Warn the user about uncommitted changes to .feature files (they will not
 * be synced). Never throws — a failed `git status` only logs a warning.
 *
 * @param {import('simple-git').SimpleGit} git - git client
 * @returns {Promise<void>}
 */
async function checkUncommittedChanges(git) {
  try {
    const statusResult = await git.status();

    // Collect .feature files from every relevant status bucket.
    // BUG FIX: in simple-git, untracked files are reported in `not_added`
    // (`created` holds newly *staged* files) and removals in `deleted` —
    // the previous version missed both buckets despite its comment
    // claiming `created` meant "untracked".
    const buckets = [
      statusResult.staged,
      statusResult.modified,
      statusResult.created,
      statusResult.not_added,
      statusResult.deleted
    ];

    // A Set deduplicates files that appear in several buckets.
    const uncommittedFeatureFiles = new Set();
    for (const bucket of buckets) {
      for (const file of bucket || []) {
        if (file.endsWith('.feature')) {
          uncommittedFeatureFiles.add(file);
        }
      }
    }

    if (uncommittedFeatureFiles.size > 0) {
      console.log('āš ļø Warning: You have uncommitted changes in the following .feature files:');
      for (const file of uncommittedFeatureFiles) {
        console.log(` šŸ“„ ${file}`);
      }
      console.log(' These changes will not be synced. Please commit them first if you want them included.\n');
    }
  } catch (error) {
    // If git status fails, just continue - don't block the sync
    console.warn(`āš ļø Warning: Could not check for uncommitted changes: ${error.message}`);
  }
}