create-sdd-project 0.16.10 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -24,6 +24,21 @@ const {
24
24
  updateAutonomy,
25
25
  regexReplaceInFile,
26
26
  } = require('./init-generator');
27
+ // v0.17.0: hash-based smart-diff + shared stack adaptations
28
+ const {
29
+ readMeta,
30
+ writeMeta,
31
+ computeHash,
32
+ hashFileOnDisk,
33
+ toPosix,
34
+ pruneExpectedAbsent,
35
+ expectedSmartDiffTrackedPaths,
36
+ normalizeForCompare: metaNormalizeForCompare,
37
+ } = require('./meta');
38
+ const {
39
+ applyStackAdaptations,
40
+ applyStackAdaptationsToContent,
41
+ } = require('./stack-adaptations');
27
42
 
28
43
  // --- v0.16.10: backup-before-replace helpers ---
29
44
  //
@@ -57,22 +72,16 @@ function buildBackupTimestamp() {
57
72
  }
58
73
 
59
74
  /**
60
- * Normalize text for smart-diff comparison. Strips trailing whitespace
61
- * from each line, normalizes CRLF/CR to LF, and trims leading/trailing
62
- * blank lines. Prevents false-positive "customized" flags on Windows
63
- * systems where git's `core.autocrlf=true` rewrites line endings to
64
- * `\r\n` on checkout while our template reads as `\n`. Also tolerates
65
- * editors that add/strip trailing whitespace.
75
+ * Normalize text for smart-diff comparison.
76
+ *
77
+ * v0.17.0: delegates to `lib/meta.js` which only strips CR/CRLF (Windows
78
+ * git core.autocrlf compatibility). Trailing whitespace is NO LONGER
79
+ * stripped, as stripping would destroy markdown hard-breaks (two trailing
80
+ * spaces = <br>) and silently wipe legitimate customizations (Gemini M2
81
+ * fix from plan v1.0 review). A local re-export here keeps the old
82
+ * symbol available for any pre-v0.17.0 code paths that still call it.
66
83
  */
67
- function normalizeForCompare(text) {
68
- return text
69
- .replace(/\r\n/g, '\n')
70
- .replace(/\r/g, '\n')
71
- .split('\n')
72
- .map((l) => l.replace(/[ \t]+$/, ''))
73
- .join('\n')
74
- .trim();
75
- }
84
+ const normalizeForCompare = metaNormalizeForCompare;
76
85
 
77
86
  /**
78
87
  * Copy a user file to .sdd-backup/<timestamp>/<relativePath> before it is
@@ -289,6 +298,16 @@ function generateUpgrade(config) {
289
298
  // so we can surface the list in the upgrade result summary.
290
299
  const modifiedAgentsResults = [];
291
300
 
301
+ // v0.17.0: provenance tracking. Read existing hashes at the start; track
302
+ // new/updated hashes as we go. Preserved files leave their entry untouched
303
+ // (Codex M1 invariant: only write canonical hashes for tool-written content).
304
+ // `filesToAdapt` collects POSIX paths of files that were replaced or newly
305
+ // written in this run; applyStackAdaptations will be called with this
306
+ // allowlist after the write loop so only these files get re-adapted.
307
+ const meta = readMeta(dest);
308
+ const newHashes = { ...(meta?.hashes ?? {}) };
309
+ const filesToAdapt = new Set();
310
+
292
311
  console.log(`\nUpgrading SDD DevFlow in ${config.projectName}...\n`);
293
312
  console.log(` Backup directory: .sdd-backup/${backupTimestamp}/\n`);
294
313
 
@@ -359,43 +378,118 @@ function generateUpgrade(config) {
359
378
  const templateAgentPath = path.join(srcSub, file);
360
379
  const existingAgentPath = path.join(destSub, file);
361
380
  const relativePath = path.relative(dest, existingAgentPath);
381
+ const posixPath = toPosix(relativePath);
362
382
 
363
383
  const rawTemplate = fs.readFileSync(templateAgentPath, 'utf8');
364
- const adaptedTarget = adaptAgentContentString(rawTemplate, file, projectType);
365
-
366
- if (fs.existsSync(existingAgentPath) && !config.forceTemplate) {
367
- const existingContent = fs.readFileSync(existingAgentPath, 'utf8');
368
- if (normalizeForCompare(existingContent) !== normalizeForCompare(adaptedTarget)) {
369
- // Customization detected OR template drift across versions.
370
- // Preserve user's file + save adapted target as .new so they
371
- // can manually re-merge.
372
- backupBeforeReplace(dest, relativePath, backupTimestamp);
373
- const newBackupPath = path.join(
374
- dest,
375
- '.sdd-backup',
376
- backupTimestamp,
377
- `${relativePath}.new`
384
+ const adaptedCoreTarget = adaptAgentContentString(rawTemplate, file, projectType);
385
+
386
+ // --- v0.17.0 decision tree ---
387
+ //
388
+ // Case 1: file missing or --force-template → unconditional write.
389
+ // Case 2: meta has a hash for this path → hash-based path.
390
+ // 2a. hash matches pristine, replace with adaptedCoreTarget.
391
+ // 2b. hash mismatches → customized, preserve + .new backup.
392
+ // IMPORTANT (Codex M1): do NOT update newHashes here.
393
+ // Case 3: no meta or no hash for this path → fallback path.
394
+ // 3a. Compute adaptedFullTarget by applying stack adaptations
395
+ // in-memory so init-adapted files don't false-positive
396
+ // (Gemini M1 fix).
397
+ // 3b. Content match → replace.
398
+ // 3c. Content mismatch → preserve + .new backup. Same Codex M1
399
+ // rule: preserved files do NOT get a new hash.
400
+
401
+ if (!fs.existsSync(existingAgentPath)) {
402
+ // Missing — write fresh and track for stack adaptations.
403
+ fs.writeFileSync(existingAgentPath, adaptedCoreTarget, 'utf8');
404
+ filesToAdapt.add(posixPath);
405
+ replaced++;
406
+ continue;
407
+ }
408
+
409
+ if (config.forceTemplate) {
410
+ backupBeforeReplace(dest, relativePath, backupTimestamp);
411
+ fs.writeFileSync(existingAgentPath, adaptedCoreTarget, 'utf8');
412
+ filesToAdapt.add(posixPath);
413
+ replaced++;
414
+ continue;
415
+ }
416
+
417
+ const existingContent = fs.readFileSync(existingAgentPath, 'utf8');
418
+ const storedHash = meta && meta.hashes[posixPath];
419
+
420
+ const preserveFile = (target) => {
421
+ backupBeforeReplace(dest, relativePath, backupTimestamp);
422
+ const newBackupPath = path.join(
423
+ dest,
424
+ '.sdd-backup',
425
+ backupTimestamp,
426
+ `${relativePath}.new`
427
+ );
428
+ try {
429
+ fs.mkdirSync(path.dirname(newBackupPath), { recursive: true });
430
+ fs.writeFileSync(newBackupPath, target, 'utf8');
431
+ } catch (e) {
432
+ console.warn(
433
+ ` ⚠ Failed to write .new backup for ${relativePath}: ${e.code || e.message}`
378
434
  );
379
- try {
380
- fs.mkdirSync(path.dirname(newBackupPath), { recursive: true });
381
- fs.writeFileSync(newBackupPath, adaptedTarget, 'utf8');
382
- } catch (e) {
383
- console.warn(
384
- ` ⚠ Failed to write .new backup for ${relativePath}: ${e.code || e.message}`
385
- );
386
- }
387
- modifiedAgentsResults.push({ name: relativePath, modified: true });
388
- preserved++;
435
+ }
436
+ modifiedAgentsResults.push({ name: relativePath, modified: true });
437
+ preserved++;
438
+ // Codex M1 invariant: do NOT update newHashes[posixPath]
439
+ // for preserved files. The existing hash (if any) persists.
440
+ };
441
+
442
+ if (storedHash) {
443
+ // Case 2: primary hash path.
444
+ const currentHash = computeHash(existingContent);
445
+ if (currentHash === storedHash) {
446
+ // Pristine — replace with core-adapted target. Stack
447
+ // adaptations will be applied via filesToAdapt after the
448
+ // smart-diff loop.
449
+ backupBeforeReplace(dest, relativePath, backupTimestamp);
450
+ fs.writeFileSync(existingAgentPath, adaptedCoreTarget, 'utf8');
451
+ filesToAdapt.add(posixPath);
452
+ replaced++;
389
453
  continue;
390
454
  }
391
- // Pristine: back up before overwriting (cheap insurance)
392
- backupBeforeReplace(dest, relativePath, backupTimestamp);
393
- } else if (fs.existsSync(existingAgentPath) && config.forceTemplate) {
394
- // --force-template: always back up and overwrite
455
+ // Hash mismatch preserve. The .new backup target is the
456
+ // FULL adapted target (core + stack) so the user can diff
457
+ // apples to apples against their customized file.
458
+ const adaptedFullTarget = applyStackAdaptationsToContent(
459
+ adaptedCoreTarget,
460
+ posixPath,
461
+ scan,
462
+ config
463
+ );
464
+ preserveFile(adaptedFullTarget);
465
+ continue;
466
+ }
467
+
468
+ // Case 3: fallback path — no hash available. Compare against
469
+ // the FULL adapted target (core + stack) so init-adapted files
470
+ // from pre-v0.17.0 projects don't false-positive (Gemini M1).
471
+ const adaptedFullTargetFallback = applyStackAdaptationsToContent(
472
+ adaptedCoreTarget,
473
+ posixPath,
474
+ scan,
475
+ config
476
+ );
477
+
478
+ if (
479
+ normalizeForCompare(existingContent) ===
480
+ normalizeForCompare(adaptedFullTargetFallback)
481
+ ) {
482
+ // Pristine per content compare — replace with core target.
483
+ // Stack adaptations run after the loop to finalize the file.
395
484
  backupBeforeReplace(dest, relativePath, backupTimestamp);
485
+ fs.writeFileSync(existingAgentPath, adaptedCoreTarget, 'utf8');
486
+ filesToAdapt.add(posixPath);
487
+ replaced++;
488
+ continue;
396
489
  }
397
- fs.writeFileSync(existingAgentPath, adaptedTarget, 'utf8');
398
- replaced++;
490
+
491
+ // Content mismatch → preserve. Same rule: no hash update.
492
+ preserveFile(adaptedFullTargetFallback);
399
493
  }
400
494
  continue;
401
495
  }
@@ -545,42 +639,72 @@ function generateUpgrade(config) {
545
639
  }
546
640
 
547
641
  // --- e) Replace top-level configs ---
548
- // AGENTS.md — v0.16.10 smart-diff (Change #3): mirror the standards pattern.
549
- // Compare existing file against freshly-adapted template output. If they
550
- // match, replace (safe pristine). If not, preserve + backup the new adapted
551
- // version as .new so the user can manually re-merge. --force-template
552
- // short-circuits and always replaces (with backup).
642
+ // AGENTS.md — hash-based smart-diff (v0.17.0 upgrade of v0.16.10 Change #3).
643
+ //
644
+ // Decision tree identical to the template-agent loop above:
645
+ // 1. Missing or --force-template → unconditional write.
646
+ // 2. meta has a hash for AGENTS.md → hash-based path:
647
+ // 2a. hash match → pristine, replace.
648
+ // 2b. hash mismatch → preserve + .new backup. Codex M1 invariant:
649
+ // do NOT update newHashes['AGENTS.md'].
650
+ // 3. No hash → fallback content compare against the full adapted
651
+ // target. AGENTS.md has no stack adaptations (adaptAgentsMd already
652
+ // includes project-type pruning), so the comparison target is the
653
+ // adaptAgentsMd output itself.
553
654
  const agentsMdTemplate = fs.readFileSync(path.join(templateDir, 'AGENTS.md'), 'utf8');
554
655
  const adaptedAgentsMd = adaptAgentsMd(agentsMdTemplate, config, scan);
555
656
  const agentsMdDestPath = path.join(dest, 'AGENTS.md');
657
+ const AGENTS_MD_POSIX = 'AGENTS.md';
658
+
659
+ const preserveAgentsMd = () => {
660
+ backupBeforeReplace(dest, 'AGENTS.md', backupTimestamp);
661
+ const newBackupPath = path.join(dest, '.sdd-backup', backupTimestamp, 'AGENTS.md.new');
662
+ try {
663
+ fs.mkdirSync(path.dirname(newBackupPath), { recursive: true });
664
+ fs.writeFileSync(newBackupPath, adaptedAgentsMd, 'utf8');
665
+ } catch (e) {
666
+ console.warn(` ⚠ Failed to write .new backup for AGENTS.md: ${e.code || e.message}`);
667
+ }
668
+ modifiedAgentsResults.push({ name: 'AGENTS.md', modified: true });
669
+ preserved++;
670
+ // Codex M1 invariant: do NOT update newHashes[AGENTS_MD_POSIX].
671
+ };
556
672
 
557
- if (fs.existsSync(agentsMdDestPath) && !config.forceTemplate) {
673
+ if (!fs.existsSync(agentsMdDestPath)) {
674
+ // Missing — write and hash fresh.
675
+ fs.writeFileSync(agentsMdDestPath, adaptedAgentsMd, 'utf8');
676
+ newHashes[AGENTS_MD_POSIX] = computeHash(adaptedAgentsMd);
677
+ replaced++;
678
+ } else if (config.forceTemplate) {
679
+ backupBeforeReplace(dest, 'AGENTS.md', backupTimestamp);
680
+ fs.writeFileSync(agentsMdDestPath, adaptedAgentsMd, 'utf8');
681
+ newHashes[AGENTS_MD_POSIX] = computeHash(adaptedAgentsMd);
682
+ replaced++;
683
+ } else {
558
684
  const existingAgentsMd = fs.readFileSync(agentsMdDestPath, 'utf8');
559
- if (normalizeForCompare(existingAgentsMd) !== normalizeForCompare(adaptedAgentsMd)) {
560
- // Customization or template drift → preserve + .new backup
561
- backupBeforeReplace(dest, 'AGENTS.md', backupTimestamp);
562
- const newBackupPath = path.join(dest, '.sdd-backup', backupTimestamp, 'AGENTS.md.new');
563
- try {
564
- fs.mkdirSync(path.dirname(newBackupPath), { recursive: true });
565
- fs.writeFileSync(newBackupPath, adaptedAgentsMd, 'utf8');
566
- } catch (e) {
567
- console.warn(` ⚠ Failed to write .new backup for AGENTS.md: ${e.code || e.message}`);
685
+ const storedAgentsMdHash = meta && meta.hashes[AGENTS_MD_POSIX];
686
+
687
+ if (storedAgentsMdHash) {
688
+ const currentHash = computeHash(existingAgentsMd);
689
+ if (currentHash === storedAgentsMdHash) {
690
+ backupBeforeReplace(dest, 'AGENTS.md', backupTimestamp);
691
+ fs.writeFileSync(agentsMdDestPath, adaptedAgentsMd, 'utf8');
692
+ newHashes[AGENTS_MD_POSIX] = computeHash(adaptedAgentsMd);
693
+ replaced++;
694
+ } else {
695
+ preserveAgentsMd();
568
696
  }
569
- modifiedAgentsResults.push({ name: 'AGENTS.md', modified: true });
570
- preserved++;
571
- } else {
572
- // Pristine → back up and replace
697
+ } else if (
698
+ normalizeForCompare(existingAgentsMd) === normalizeForCompare(adaptedAgentsMd)
699
+ ) {
700
+ // Fallback content-compare.
573
701
  backupBeforeReplace(dest, 'AGENTS.md', backupTimestamp);
574
702
  fs.writeFileSync(agentsMdDestPath, adaptedAgentsMd, 'utf8');
703
+ newHashes[AGENTS_MD_POSIX] = computeHash(adaptedAgentsMd);
575
704
  replaced++;
705
+ } else {
706
+ preserveAgentsMd();
576
707
  }
577
- } else {
578
- // Missing file, or --force-template: always back up (if exists) and overwrite
579
- if (fs.existsSync(agentsMdDestPath)) {
580
- backupBeforeReplace(dest, 'AGENTS.md', backupTimestamp);
581
- }
582
- fs.writeFileSync(agentsMdDestPath, adaptedAgentsMd, 'utf8');
583
- replaced++;
584
708
  }
585
709
 
586
710
  // CLAUDE.md / GEMINI.md (back up before replace, not smart-diff'd)
@@ -640,17 +764,33 @@ function generateUpgrade(config) {
640
764
  replaced++;
641
765
  }
642
766
 
643
- // --- e3) .gitignore — idempotent append of .sdd-backup/ (v0.16.10) ---
644
- // Existing projects created before v0.16.10 don't have .sdd-backup/ in their
645
- // .gitignore. Append it once so backup dirs aren't accidentally committed.
767
+ // --- e3) .gitignore — idempotent append of .sdd-backup/ (v0.16.10)
768
+ // and .sdd-meta.json (v0.17.0) ---
769
+ // Existing projects created before these versions don't have the
770
+ // entries in their .gitignore. Append them once so the files aren't
771
+ // accidentally committed.
646
772
  const userGitignorePath = path.join(dest, '.gitignore');
647
773
  if (fs.existsSync(userGitignorePath)) {
648
- const existingGitignore = fs.readFileSync(userGitignorePath, 'utf8');
774
+ let existingGitignore = fs.readFileSync(userGitignorePath, 'utf8');
775
+ let updatedGitignore = false;
776
+
649
777
  if (!/^\s*\/?\.sdd-backup\/?\s*$/m.test(existingGitignore)) {
650
778
  const appendBlock = '\n\n# sdd-devflow upgrade backups (ignored — kept locally for recovery only)\n.sdd-backup/\n';
651
- fs.writeFileSync(userGitignorePath, existingGitignore.trimEnd() + appendBlock, 'utf8');
779
+ existingGitignore = existingGitignore.trimEnd() + appendBlock;
780
+ updatedGitignore = true;
652
781
  step('Updated .gitignore with .sdd-backup/ entry');
653
782
  }
783
+
784
+ if (!/^\s*\/?\.sdd-meta\.json\s*$/m.test(existingGitignore)) {
785
+ const appendBlock = '\n\n# sdd-devflow provenance tracking (local-only, content-addressable hashes)\n.sdd-meta.json\n';
786
+ existingGitignore = existingGitignore.trimEnd() + appendBlock;
787
+ updatedGitignore = true;
788
+ step('Updated .gitignore with .sdd-meta.json entry');
789
+ }
790
+
791
+ if (updatedGitignore) {
792
+ fs.writeFileSync(userGitignorePath, existingGitignore, 'utf8');
793
+ }
654
794
  }
655
795
 
656
796
  // --- f) Adapt for project type ---
@@ -673,30 +813,33 @@ function generateUpgrade(config) {
673
813
  }
674
814
  }
675
815
 
676
- // Adapt agent/skill content for project type
816
+ // Adapt agent/skill content for project type (single-stack pruning —
817
+ // removes frontend/backend refs). Separate from stack substitutions
818
+ // (Zod/ORM/DDD). Safe to run on all files because the pruning rules
819
+ // use literal template strings that only appear in raw template.
677
820
  if (projectType !== 'fullstack') {
678
821
  adaptAgentContentForProjectType(dest, config, regexReplaceInFile);
679
822
  }
680
823
 
681
- // Adapt copied files for detected stack (Zod, Prisma, DDD, etc.)
682
- adaptCopiedFiles(dest, scan, config);
683
-
684
- // Adapt documentation-standards for project type
685
- const docStdPath2 = path.join(dest, 'ai-specs', 'specs', 'documentation-standards.mdc');
686
- if (fs.existsSync(docStdPath2)) {
687
- if (projectType === 'backend') {
688
- regexReplaceInFile(docStdPath2, [
689
- [/\| `ai-specs\/specs\/frontend-standards\.mdc` \|[^\n]*\n/, ''],
690
- [/\| `docs\/specs\/ui-components\.md` \|[^\n]*\n/, ''],
691
- [/ - UI component changes `docs\/specs\/ui-components\.md`\n/, ''],
692
- ]);
693
- } else if (projectType === 'frontend') {
694
- regexReplaceInFile(docStdPath2, [
695
- [/\| `ai-specs\/specs\/backend-standards\.mdc` \|[^\n]*\n/, ''],
696
- [/\| `docs\/specs\/api-spec\.yaml` \|[^\n]*\n/, ''],
697
- ]);
698
- }
824
+ // v0.17.0: Stack adaptations run ONLY on files that were replaced or
825
+ // newly written in this run. Preserved (customized) files MUST NOT be
826
+ // touched by stack adaptations, otherwise their user edits could be
827
+ // mangled by the rule replacements (Codex M1 + plan v1.1 § Allowlist
828
+ // semantics).
829
+ //
830
+ // SKILL.md, ticket-template.md, and documentation-standards.mdc were
831
+ // wholesale-recopied earlier in the upgrade (via fs.cpSync and the
832
+ // standards pipeline), so they are always in the "replaced" state and
833
+ // must be in the allowlist.
834
+ for (const dir of toolDirs) {
835
+ filesToAdapt.add(toPosix(`${dir}/skills/development-workflow/SKILL.md`));
836
+ filesToAdapt.add(
837
+ toPosix(`${dir}/skills/development-workflow/references/ticket-template.md`)
838
+ );
699
839
  }
840
+ filesToAdapt.add(toPosix('ai-specs/specs/documentation-standards.mdc'));
841
+
842
+ applyStackAdaptations(dest, scan, config, filesToAdapt);
700
843
 
701
844
  step('Adapted files for project type and stack');
702
845
 
@@ -732,6 +875,32 @@ function generateUpgrade(config) {
732
875
  fs.writeFileSync(path.join(dest, '.sdd-version'), newVersion + '\n', 'utf8');
733
876
  step(`Updated .sdd-version to ${newVersion}`);
734
877
 
878
+ // --- g1) v0.17.0: update .sdd-meta.json ---
879
+ //
880
+ // For every smart-diff-tracked file that was replaced or newly written
881
+ // in this run (i.e. in filesToAdapt AND in the expected tracked set),
882
+ // recompute its hash from the post-adaptation on-disk content and merge
883
+ // into newHashes. Preserved files are NOT in filesToAdapt, so their old
884
+ // hash (if any) is left alone — Codex M1 invariant.
885
+ //
886
+ // Then prune hashes for paths that are no longer expected for this
887
+ // (aiTools, projectType) combination (e.g. single-stack removed a
888
+ // frontend agent). User-deleted files that ARE expected keep their
889
+ // hash, since the next upgrade will recreate the file from template.
890
+ {
891
+ const trackedSet = expectedSmartDiffTrackedPaths(aiTools, projectType);
892
+ for (const posixPath of filesToAdapt) {
893
+ if (!trackedSet.has(posixPath)) continue;
894
+ const absPath = path.join(dest, ...posixPath.split('/'));
895
+ const h = hashFileOnDisk(absPath);
896
+ if (h !== null) {
897
+ newHashes[posixPath] = h;
898
+ }
899
+ }
900
+ const prunedHashes = pruneExpectedAbsent(newHashes, aiTools, projectType);
901
+ writeMeta(dest, prunedHashes);
902
+ }
903
+
735
904
  // --- Show result ---
736
905
  const updatedCount = standardsResults.filter((s) => !s.modified).length;
737
906
  const preservedCount = modifiedStandards.length;
package/package.json CHANGED
@@ -1,6 +1,6 @@
1
1
  {
2
2
  "name": "create-sdd-project",
3
- "version": "0.16.10",
3
+ "version": "0.17.0",
4
4
  "description": "Create a new SDD DevFlow project with AI-assisted development workflow",
5
5
  "bin": {
6
6
  "create-sdd-project": "bin/cli.js"
@@ -53,3 +53,6 @@ docs/project_notes/pm-stop.md
53
53
 
54
54
  # sdd-devflow upgrade backups (ignored — kept locally for recovery only)
55
55
  .sdd-backup/
56
+
57
+ # sdd-devflow provenance tracking (local-only, content-addressable hashes)
58
+ .sdd-meta.json