create-sdd-project 0.16.9 → 0.17.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -8,6 +8,8 @@ const {
  BACKEND_AGENTS,
  } = require('./config');
  const { adaptAgentContentForProjectType } = require('./adapt-agents');
+ const { applyStackAdaptations } = require('./stack-adaptations');
+ const { writeMeta, computeInstallHashes } = require('./meta');

  /**
  * Install SDD DevFlow into an existing project.
@@ -252,6 +254,24 @@ function generateInit(config) {
  const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'));
  fs.writeFileSync(path.join(dest, '.sdd-version'), pkg.version + '\n', 'utf8');

+ // v0.17.0: write provenance hashes. Computed AFTER all adaptations
+ // so the hashes reflect the exact post-install content on disk. The
+ // next `--upgrade` uses these hashes to answer "did the user edit
+ // this file?" precisely, avoiding cross-version false-positive
+ // preserve warnings (Codex P1 from v0.16.10 plan review).
+ //
+ // Codex round 2 P1 fix: exclude pre-existing files that --init skipped.
+ // Those belong to the user, not to SDD DevFlow, and must NOT be marked
+ // as tool-canonical — otherwise the next upgrade would hash-match the
+ // user's content and silently overwrite it. `skipped` already contains
+ // the relative paths of files that were skipped due to pre-existence;
+ // convert them to POSIX form and pass as the exclude set.
+ const skippedPosix = new Set(skipped.map((p) => p.split('\\').join('/')));
+ writeMeta(
+ dest,
+ computeInstallHashes(dest, config.aiTools, config.projectType, skippedPosix)
+ );
+
  // Done
  console.log(`\nDone! Next steps:`);
  console.log(` git add -A && git commit -m "chore: add SDD DevFlow to existing project"`);
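
To make the intent of the new `writeMeta(...)` call concrete, here is a minimal sketch (not part of this diff) of how a later `--upgrade` run could consult the recorded hashes to decide whether a tracked file still matches what the tool last wrote. It uses only the `readMeta` and `hashFileOnDisk` helpers added in `package/lib/meta.js` below; `userEditedSinceLastWrite` is a hypothetical name for illustration.

```js
const path = require('node:path');
const { readMeta, hashFileOnDisk } = require('./meta');

// Hypothetical helper: true → the user edited the file since the tool last
// wrote it (preserve it), false → safe to replace, null → no usable meta
// (fall back to the pre-v0.17.0 content-compare behavior).
function userEditedSinceLastWrite(dest, relPosixPath) {
  const meta = readMeta(dest);
  if (!meta) return null;
  const recorded = meta.hashes[relPosixPath];
  if (!recorded) return null;
  const current = hashFileOnDisk(path.join(dest, ...relPosixPath.split('/')));
  return current !== null && current !== recorded;
}
```
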
@@ -343,163 +363,22 @@ function regexReplaceInFile(filePath, replacements) {
  }

  function adaptCopiedFiles(dest, scan, config) {
- const orm = scan.backend.orm || 'your ORM';
- const db = scan.backend.db || 'your database';
-
- // Common Zod generic validation replacements (all agents + skills)
- // Phase 1: Replace "Zod" with "validation" (most specific first)
- const zodReplacements = [
- ['Zod data schemas', 'validation schemas'],
- ['Zod schemas', 'validation schemas'],
- ];
- // Phase 2: Clean up shared/src/schemas/ path (Zod-specific convention)
- // Applied AFTER phase 1, so these match the post-replacement text
- const schemaPathReplacements = [
- ['validation schemas in `shared/src/schemas/` if applicable', 'validation schemas if applicable'],
- ['validation schemas in `shared/src/schemas/` (if shared workspace exists)', 'validation schemas (if shared workspace exists)'],
- ['validation schemas in `shared/src/schemas/`', 'validation schemas'],
- ['validation schemas (`shared/src/schemas/`)', 'validation schemas'],
- ['`shared/src/schemas/` (if exists) for current validation schemas', 'project validation schemas'],
- // Gemini spec-creator: no "Zod" prefix, standalone path reference
- ['and `shared/src/schemas/` (if exists)', ''],
- ['schemas vs `shared/src/schemas/`', 'validation schemas up to date'],
- ];
-
- // ORM/DB replacements for backend agents
- let ormReplacements = [];
- if (scan.backend.orm && scan.backend.orm !== 'Prisma') {
- ormReplacements = [
- ['Prisma ORM, and PostgreSQL', `${orm}${db !== 'your database' ? `, and ${db}` : ''}`],
- ['Repository implementations (Prisma)', `Repository implementations (${orm})`],
- ];
- } else if (!scan.backend.orm) {
- // No ORM detected — remove Prisma references with generic text
- const dbLabel = db !== 'your database' ? `, and ${db}` : '';
- ormReplacements = [
- ['Prisma ORM, and PostgreSQL', dbLabel ? dbLabel.slice(6) : 'your database'],
- ['Repository implementations (Prisma)', 'Repository implementations'],
- ];
- }
-
- // Apply to all AI tool directories
- const toolDirs = [];
- if (config.aiTools !== 'gemini') toolDirs.push('.claude');
- if (config.aiTools !== 'claude') toolDirs.push('.gemini');
-
- for (const dir of toolDirs) {
- // Backend agents: Zod + ORM replacements
- if (scan.backend.validation !== 'Zod') {
- const backendAgentReplacements = [...zodReplacements, ...ormReplacements];
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, backendAgentReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, backendAgentReplacements);
- // Phase 2: clean up shared/src/schemas/ paths
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, schemaPathReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, schemaPathReplacements);
- } else if (ormReplacements.length > 0) {
- // Zod detected but different ORM — only ORM replacements
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, ormReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, ormReplacements);
- }
-
- // Multi-purpose agents: Zod replacements only
- if (scan.backend.validation !== 'Zod') {
- const allZodReplacements = [...zodReplacements, ...schemaPathReplacements];
- replaceInCopiedFile(dest, `${dir}/agents/spec-creator.md`, allZodReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/production-code-validator.md`, allZodReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/database-architect.md`, allZodReplacements);
- }
-
- // Skills: Zod + schema path replacements
- if (scan.backend.validation !== 'Zod') {
- const allZodReplacements = [...zodReplacements, ...schemaPathReplacements];
- replaceInCopiedFile(dest, `${dir}/skills/development-workflow/SKILL.md`, allZodReplacements);
- replaceInCopiedFile(dest, `${dir}/skills/development-workflow/references/ticket-template.md`, allZodReplacements);
- }
- }
-
- // Architecture adaptation: DDD-specific content in backend agents
- const arch = scan.srcStructure ? scan.srcStructure.pattern : 'ddd';
- if (arch !== 'ddd') {
- for (const dir of toolDirs) {
- // backend-planner: adapt header, exploration paths, implementation order, rules
- regexReplaceInFile(path.join(dest, dir, 'agents', 'backend-planner.md'), [
- // Header: remove DDD reference (Claude verbose format)
- ['specializing in Domain-Driven Design (DDD) layered architecture with deep knowledge of',
- 'specializing in layered architecture with deep knowledge of'],
- // Header: remove DDD reference (Gemini condensed format)
- ['(DDD architecture)', '(layered architecture)'],
- // Exploration: replace DDD-specific paths with generic (number-agnostic)
- [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
- [/\d+\. Explore existing domain entities, services, validators, repositories\n/,
- '5. Explore the codebase for existing patterns, layer structure, and reusable code\n'],
- [/\d+\. Explore `backend\/src\/infrastructure\/` for existing repositories\n/, ''],
- // Implementation Order (Claude format)
- ['following DDD layer order: Domain > Application > Infrastructure > Presentation > Tests',
- 'following the layer order defined in backend-standards.mdc'],
- // Implementation Order (Gemini format)
- ['Implementation Order (Domain > Application > Infrastructure > Presentation > Tests)',
- 'Implementation Order (see backend-standards.mdc for layer order)'],
- // Rules (Claude format)
- ['Follow DDD layer separation: Domain > Application > Infrastructure > Presentation',
- 'Follow the layer separation defined in backend-standards.mdc'],
- ]);
- // backend-developer: adapt frontmatter, header, exploration, implementation order, rules
- regexReplaceInFile(path.join(dest, dir, 'agents', 'backend-developer.md'), [
- // Frontmatter (Claude format)
- ['follows DDD layered architecture',
- 'follows layered architecture'],
- // Header (Claude format)
- ['specializing in Domain-Driven Design (DDD) with',
- 'specializing in layered architecture with'],
- // Header (Gemini condensed format)
- ['(DDD architecture)', '(layered architecture)'],
- // Exploration: remove shared/src/schemas reference (number-agnostic)
- [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
- // Implementation Order (Claude format): replace DDD layers
- ['Follow the DDD layer order from the plan:',
- 'Follow the layer order from the plan (see backend-standards.mdc for project layers):'],
- [/\d+\. \*\*Domain Layer\*\*: Entities, value objects, repository interfaces, domain errors\n/,
- '1. **Data Layer**: Models, database operations, data access\n'],
- [/\d+\. \*\*Application Layer\*\*: Services, validators, DTOs\n/,
- '2. **Business Logic Layer**: Controllers, services, external integrations\n'],
- [/\d+\. \*\*Infrastructure Layer\*\*: Repository implementations \([^)]*\), external integrations\n/,
- '3. **Presentation Layer**: Routes, handlers, middleware\n'],
- [/\d+\. \*\*Presentation Layer\*\*: Controllers, routes, middleware\n/,
- '4. **Integration Layer**: Wiring, configuration, server registration\n'],
- // Implementation Order (Gemini format)
- ['Follow DDD layer order: Domain > Application > Infrastructure > Presentation.',
- 'Follow the layer order defined in backend-standards.mdc.'],
- // Rules (Claude format)
- ['**ALWAYS** follow DDD layer separation',
- '**ALWAYS** follow the layer separation defined in backend-standards.mdc'],
- ['**ALWAYS** handle errors with custom domain error classes',
- '**ALWAYS** handle errors following the patterns in backend-standards.mdc'],
- // Rules (Gemini format)
- ['ALWAYS handle errors with domain error classes',
- 'ALWAYS handle errors following the patterns in backend-standards.mdc'],
- // Documentation: remove shared/src/schemas mandatory update
- [/- (?:\*\*MANDATORY\*\*: )?If modifying a DB schema → update .* schemas in `shared\/src\/schemas\/` BEFORE continuing\n/, ''],
- ]);
- }
- }
-
- // Agent/skill content: remove frontend/backend-specific references for single-stack projects
+ // v0.17.0: delegate stack-specific adaptations (Zod → validation,
+ // ORM swap, DDD layered, documentation-standards.mdc project-type
+ // pruning) to the shared module in lib/stack-adaptations.js. This
+ // makes the exact same transformation available to upgrade-generator.js
+ // for the hash-based smart-diff replacement path.
+ //
+ // The in-memory variant (applyStackAdaptationsToContent) is also used
+ // by the upgrade fallback path to construct the "what init would have
+ // written" comparison target when .sdd-meta.json is missing (Gemini M1
+ // fix from plan v1.0 cross-model review).
+ applyStackAdaptations(dest, scan, config, null);
+
+ // Remove frontend/backend-specific references for single-stack
+ // projects. Separate from stack adaptations: this is project-type
+ // pruning, not stack substitution.
  adaptAgentContentForProjectType(dest, config, regexReplaceInFile);
-
- // documentation-standards.mdc: remove irrelevant rows based on project type
- const docStdPath = path.join(dest, 'ai-specs', 'specs', 'documentation-standards.mdc');
- if (fs.existsSync(docStdPath)) {
- let content = fs.readFileSync(docStdPath, 'utf8');
- if (config.projectType === 'backend') {
- content = content.replace(/\| `ai-specs\/specs\/frontend-standards\.mdc` \|[^\n]*\n/, '');
- content = content.replace(/\| `docs\/specs\/ui-components\.md` \|[^\n]*\n/, '');
- content = content.replace(/ - UI component changes → `docs\/specs\/ui-components\.md`\n/, '');
- } else if (config.projectType === 'frontend') {
- content = content.replace(/\| `ai-specs\/specs\/backend-standards\.mdc` \|[^\n]*\n/, '');
- content = content.replace(/\| `docs\/specs\/api-spec\.yaml` \|[^\n]*\n/, '');
- }
- fs.writeFileSync(docStdPath, content, 'utf8');
- }
  }

  // --- Standards Adaptation ---
@@ -746,54 +625,82 @@ function findSrcRootName(scan) {
  function adaptAgentsMd(template, config, scan) {
  let content = template;

- // Replace project structure with actual directories
- const rootDirs = scan.rootDirs;
- const tree = rootDirs.map((d) => `├── ${d.replace(/\/$/, '/')} `).join('\n');
- const treeBlock = `\`\`\`\nproject/\n${tree}\n└── docs/ ← Documentation\n\`\`\``;
-
- // Robust: flexible whitespace between CONFIG comment and code block
- content = content.replace(
- /<!-- CONFIG: Adjust directories[^>]*-->\n+```\nproject\/\n[\s\S]*?```/,
- treeBlock
- );
-
- // If not monorepo, simplify the install instructions
- // Robust: match any number of table rows (not hardcoded count)
- if (!scan.isMonorepo) {
+ // v0.16.10: guard every replacement against an empty/unhelpful scan.
+ // The previous version unconditionally rewrote the project tree and the
+ // Standards References line, which produced broken output like
+ // `Backend patterns ()` (empty parens) when the scanner couldn't detect a
+ // framework. That was the root cause of the foodXPlorer v0.16.9 regression.
+ // Now each replacement only runs when the scan produced enough information
+ // to improve on the template defaults; otherwise the template stays.
+
+ // Replace project structure with actual directories — only when the scanner
+ // detected at least one meaningful (non-SDD-infrastructure) directory.
+ const rootDirs = scan.rootDirs || [];
+ const meaningfulDirs = rootDirs.filter((d) => {
+ const norm = d.replace(/\/$/, '');
+ return norm !== 'docs' && norm !== 'ai-specs';
+ });
+ if (meaningfulDirs.length > 0) {
+ // v0.17.0: use meaningfulDirs (not rootDirs) to build the tree. The
+ // SDD-infrastructure dirs (ai-specs/, docs/) get installed BY init
+ // itself, so at install time they're absent from the scan but at
+ // upgrade time they're present. Using rootDirs would produce
+ // different content in each scan → hash drift → false-positive
+ // preserve warnings on every upgrade. Using meaningfulDirs stabilizes
+ // the output across install and upgrade for the same project.
+ const tree = meaningfulDirs.map((d) => `├── ${d.replace(/\/$/, '/')} `).join('\n');
+ const treeBlock = `\`\`\`\nproject/\n${tree}\n└── docs/ ← Documentation\n\`\`\``;
  content = content.replace(
- /\*\*Critical\*\*: NEVER install dependencies in the root directory\.\n\n(\|.*\n)+/,
- ''
+ /<!-- CONFIG: Adjust directories[^>]*-->\n+```\nproject\/\n[\s\S]*?```/,
+ treeBlock
  );
+
+ // If not monorepo, simplify the install instructions (only when we
+ // actually rewrote the tree, otherwise leave the template alone).
+ if (!scan.isMonorepo) {
+ content = content.replace(
+ /\*\*Critical\*\*: NEVER install dependencies in the root directory\.\n\n(\|.*\n)+/,
+ ''
+ );
+ }
  }

- // Adapt Standards References descriptions
- if (scan.backend.detected) {
- const parts = [scan.srcStructure.pattern ? patternLabelFor(scan.srcStructure.pattern) : null, scan.backend.framework, scan.backend.orm].filter(Boolean);
- content = content.replace(
- 'Backend patterns (DDD, Express, Prisma)',
- `Backend patterns (${parts.join(', ')})`
- );
+ // Adapt Backend Standards description — only when we have enough stack info
+ // to build a non-empty parenthetical. Otherwise leave the template default.
+ if (scan.backend && scan.backend.detected) {
+ const parts = [
+ scan.srcStructure && scan.srcStructure.pattern ? patternLabelFor(scan.srcStructure.pattern) : null,
+ scan.backend.framework,
+ scan.backend.orm,
+ ].filter(Boolean);
+ if (parts.length > 0) {
+ content = content.replace(
+ 'Backend patterns (DDD, Express, Prisma)',
+ `Backend patterns (${parts.join(', ')})`
+ );
+ }
  }
- if (scan.frontend.detected) {
+
+ if (scan.frontend && scan.frontend.detected) {
  const parts = [scan.frontend.framework, scan.frontend.styling, scan.frontend.components].filter(Boolean);
- content = content.replace(
- 'Frontend patterns (Next.js, Tailwind, Radix)',
- `Frontend patterns (${parts.join(', ')})`
- );
- } else {
- // Remove frontend-standards reference for backend-only projects
- content = content.replace(
- /- \[Frontend Standards\].*\n/,
- ''
- );
+ if (parts.length > 0) {
+ content = content.replace(
+ 'Frontend patterns (Next.js, Tailwind, Radix)',
+ `Frontend patterns (${parts.join(', ')})`
+ );
+ }
  }

- if (!scan.backend.detected) {
- // Remove backend-standards reference for frontend-only projects
- content = content.replace(
- /- \[Backend Standards\].*\n/,
- ''
- );
+ // Standards-links pruning: use the authoritative `config.projectType`
+ // instead of scanner detection. Scanner is unreliable on freshly-scaffolded
+ // projects (no real source code yet) and would cause cross-path drift
+ // between scaffold and upgrade. config.projectType comes from user choice
+ // at scaffold time or detectProjectType() at upgrade time (which reads
+ // from existing files, not source patterns).
+ if (config && config.projectType === 'backend') {
+ content = content.replace(/- \[Frontend Standards\].*\n/, '');
+ } else if (config && config.projectType === 'frontend') {
+ content = content.replace(/- \[Backend Standards\].*\n/, '');
  }

  return content;
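
A quick illustration of the hash-stability argument in the comments above (directory names are made up): filtering the scan down to `meaningfulDirs` makes the install-time and upgrade-time scans feed the same input to the tree builder, so the generated AGENTS.md hashes identically on both paths.

```js
// Same filter as in adaptAgentsMd above, extracted for illustration only.
const filterMeaningful = (dirs) =>
  dirs.filter((d) => !['docs', 'ai-specs'].includes(d.replace(/\/$/, '')));

// Install time: the SDD dirs don't exist yet.
filterMeaningful(['backend/', 'frontend/']);
// → ['backend/', 'frontend/']

// Upgrade time: the same project now also contains ai-specs/ and docs/.
filterMeaningful(['backend/', 'frontend/', 'ai-specs/', 'docs/']);
// → ['backend/', 'frontend/']  (same tree input → same AGENTS.md → no hash drift)
```
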
@@ -1048,9 +955,25 @@ function appendGitignore(dest, skipped) {
  const sddEntries = '\n# SDD DevFlow\ndocs/tickets/*.md\n!docs/tickets/.gitkeep\n';

  if (fs.existsSync(gitignorePath)) {
- const content = fs.readFileSync(gitignorePath, 'utf8');
+ let content = fs.readFileSync(gitignorePath, 'utf8');
+ let appended = false;
  if (!content.includes('SDD DevFlow')) {
- fs.appendFileSync(gitignorePath, sddEntries, 'utf8');
+ content = content.trimEnd() + sddEntries;
+ appended = true;
+ }
+ // v0.17.0: ensure .sdd-meta.json + .sdd-backup/ are ignored (local-only
+ // metadata). Codex round 2 P2 fix — --init must also append these
+ // entries idempotently, matching the upgrade path.
+ if (!/^\s*\/?\.sdd-backup\/?\s*$/m.test(content)) {
+ content = content.trimEnd() + '\n\n# sdd-devflow upgrade backups (ignored — kept locally for recovery only)\n.sdd-backup/\n';
+ appended = true;
+ }
+ if (!/^\s*\/?\.sdd-meta\.json\s*$/m.test(content)) {
+ content = content.trimEnd() + '\n\n# sdd-devflow provenance tracking (local-only, content-addressable hashes)\n.sdd-meta.json\n';
+ appended = true;
+ }
+ if (appended) {
+ fs.writeFileSync(gitignorePath, content, 'utf8');
  step('Appended SDD entries to .gitignore');
  }
  } else {
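
The idempotency here hinges on the anchored multiline regexes rather than a substring check, so the entries are appended at most once across repeated `--init` runs. A small illustration (the sample .gitignore contents are made up):

```js
// Matches a whole .gitignore line, with or without a leading slash.
const backupRe = /^\s*\/?\.sdd-backup\/?\s*$/m;
const metaRe = /^\s*\/?\.sdd-meta\.json\s*$/m;

backupRe.test('node_modules/\n.sdd-backup/\n'); // true  → entry present, skip append
backupRe.test('node_modules/\n');               // false → append once
metaRe.test('/.sdd-meta.json\n');               // true  → leading slash still counts
```
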
package/lib/meta.js ADDED
@@ -0,0 +1,291 @@
+ 'use strict';
+
+ /**
+ * SDD DevFlow provenance tracking — v0.17.0+
+ *
+ * The `.sdd-meta.json` file stores content-addressable hashes of files the
+ * tool considers "canonically tool-owned" (template agents + AGENTS.md in
+ * v0.17.0). The upgrade path uses these hashes to answer the question
+ * "has the user edited this file since the last time the tool wrote it?"
+ * precisely, without comparing against the new template's adapted output
+ * (which drifts across versions and causes false-positive preserve
+ * warnings on cross-version upgrades — the Codex P1 finding from the
+ * v0.16.10 cross-model review).
+ *
+ * Core invariant (Codex M1 from plan v1.0 review): a hash in this file
+ * represents "the last time the tool wrote this file, the content hashed
+ * to X". Hashes are ONLY written/updated when the tool actually wrote
+ * canonical output to a file in the current run (replaced, new, or
+ * --force-template paths). Preserved files leave their hash entry
+ * untouched — otherwise the user's customized content would be hashed and
+ * silently overwritten on the next upgrade.
+ *
+ * File format (schemaVersion: 1):
+ * {
+ * "schemaVersion": 1,
+ * "hashes": {
+ * ".claude/agents/backend-planner.md": "sha256:abc...",
+ * "AGENTS.md": "sha256:def...",
+ * ...
+ * }
+ * }
+ *
+ * Path keys are POSIX-normalized (forward slashes) on ALL platforms so
+ * lookups work consistently on Windows where path.join would otherwise
+ * produce backslashes (Gemini M2 fix).
+ */
+
+ const fs = require('node:fs');
+ const path = require('node:path');
+ const { createHash } = require('node:crypto');
+
+ const { FRONTEND_AGENTS, BACKEND_AGENTS, TEMPLATE_AGENTS } = require('./config');
+
+ const META_FILE = '.sdd-meta.json';
+ const CURRENT_SCHEMA_VERSION = 1;
+
+ /**
+ * Normalize text for content-addressable hashing.
+ *
+ * v0.17.0: only strip CR / CRLF line endings (Windows git core.autocrlf
+ * compatibility). Do NOT strip trailing whitespace per line — that would
+ * destroy markdown hard-breaks (two trailing spaces render as <br>) and
+ * silently wipe user customizations that only touched whitespace
+ * (Gemini M2 fix).
+ *
+ * Trade-off: editors configured to "trim trailing whitespace on save"
+ * (e.g. VSCode files.trimTrailingWhitespace=true) will produce a hash
+ * mismatch even without semantic edits. This is a conservative false
+ * positive — the upgrade preserves the file and the user can re-run
+ * with --force-template to accept the new template content. The
+ * alternative (silent wipe of markdown hard-breaks) is strictly worse.
+ */
+ function normalizeForCompare(text) {
+ return text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+ }
+
+ /**
+ * Compute the content-addressable hash of a string.
+ *
+ * Returns 'sha256:<hex>'. The prefix is mandatory so v0.17.x can
+ * introduce additional algorithms (e.g. 'blake3:...') without breaking
+ * old readers — equality comparison on the full string handles upgrades
+ * naturally.
+ */
+ function computeHash(content) {
+ const digest = createHash('sha256').update(normalizeForCompare(content), 'utf8').digest('hex');
+ return `sha256:${digest}`;
+ }
+
+ /**
+ * Compute the hash of a file on disk, or null if it doesn't exist.
+ * Reads as UTF-8 (all tracked files are text).
+ */
+ function hashFileOnDisk(absPath) {
+ if (!fs.existsSync(absPath)) return null;
+ try {
+ return computeHash(fs.readFileSync(absPath, 'utf8'));
+ } catch {
+ return null;
+ }
+ }
+
+ /**
+ * Normalize a platform-relative path to POSIX form for use as a hash map
+ * key. On Windows this converts backslashes to forward slashes; on POSIX
+ * it's a no-op.
+ */
+ function toPosix(relativePath) {
+ return relativePath.split(path.sep).join('/');
+ }
+
+ /**
+ * Read and validate .sdd-meta.json. Returns null on ANY read/parse/shape
+ * failure so callers can fall back to v0.16.10 content-compare behavior.
+ * Never throws.
+ *
+ * Returns { schemaVersion, hashes } on success.
+ */
+ function readMeta(dest) {
+ const p = path.join(dest, META_FILE);
+ if (!fs.existsSync(p)) return null;
+
+ let raw;
+ try {
+ raw = fs.readFileSync(p, 'utf8');
+ } catch (e) {
+ console.warn(` ⚠ .sdd-meta.json unreadable (${e.code || e.message}). Falling back to content compare.`);
+ return null;
+ }
+
+ let parsed;
+ try {
+ parsed = JSON.parse(raw);
+ } catch (e) {
+ console.warn(` ⚠ .sdd-meta.json is not valid JSON (${e.message}). Falling back to content compare.`);
+ return null;
+ }
+
+ if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
+ console.warn(` ⚠ .sdd-meta.json root is not an object. Falling back.`);
+ return null;
+ }
+
+ // Schema version: absent → assume v1 (forward-compat with writers that
+ // might omit the field). Greater than current → log a warning and fall
+ // back (don't try to interpret future schemas).
+ const schemaVersion = parsed.schemaVersion ?? 1;
+ if (typeof schemaVersion !== 'number' || schemaVersion < 1) {
+ console.warn(` ⚠ .sdd-meta.json has invalid schemaVersion ${schemaVersion}. Falling back.`);
+ return null;
+ }
+ if (schemaVersion > CURRENT_SCHEMA_VERSION) {
+ console.warn(
+ ` ⚠ .sdd-meta.json schemaVersion ${schemaVersion} is newer than supported ${CURRENT_SCHEMA_VERSION}. ` +
+ `Falling back to content compare. Upgrade the sdd-devflow CLI to a newer version.`
+ );
+ return null;
+ }
+
+ const hashes = parsed.hashes;
+ if (typeof hashes !== 'object' || hashes === null || Array.isArray(hashes)) {
+ console.warn(` ⚠ .sdd-meta.json hashes field is not an object. Falling back.`);
+ return null;
+ }
+
+ // Shallow-validate each entry: key is a string, value matches the
+ // sha256:<hex> shape. Malformed entries are dropped silently (they'll
+ // be recomputed on the next upgrade).
+ const cleaned = {};
+ const HASH_RE = /^sha256:[0-9a-f]{64}$/;
+ for (const [k, v] of Object.entries(hashes)) {
+ if (typeof k !== 'string' || typeof v !== 'string') continue;
+ if (!HASH_RE.test(v)) continue;
+ // Normalize keys to POSIX in case an older writer produced
+ // backslashed paths on Windows.
+ cleaned[k.split('\\').join('/')] = v;
+ }
+
+ return { schemaVersion, hashes: cleaned };
+ }
+
+ /**
+ * Write .sdd-meta.json with the given hashes map. Non-fatal on failure —
+ * logs a warning but does NOT throw. The next upgrade will recompute and
+ * try again.
+ */
+ function writeMeta(dest, hashes) {
+ const p = path.join(dest, META_FILE);
+ const payload = {
+ schemaVersion: CURRENT_SCHEMA_VERSION,
+ hashes,
+ };
+ try {
+ fs.writeFileSync(p, JSON.stringify(payload, null, 2) + '\n', 'utf8');
+ } catch (e) {
+ console.warn(
+ ` ⚠ Failed to write .sdd-meta.json: ${e.code || e.message}. ` +
+ `Next upgrade will fall back to content compare.`
+ );
+ }
+ }
+
+ /**
+ * Compute the full set of POSIX paths that SHOULD have a hash entry for
+ * the given (aiTools, projectType) combination. Used for two purposes:
+ *
+ * 1. Pruning: remove hash entries for files that are expected-absent
+ * (e.g., single-stack project removed frontend agents) — but NOT for
+ * files the user temporarily deleted manually (those get recreated
+ * on the next upgrade, so their hash should persist).
+ *
+ * 2. Install-time hashing: iterate this set, compute each file's hash
+ * if the file exists on disk.
+ *
+ * v0.17.0 scope: template agents (.claude/agents/*, .gemini/agents/*)
+ * + AGENTS.md. SKILL.md / ticket-template.md / documentation-standards.mdc
+ * are tracked with wholesale-recopy + stack-adaptations on every upgrade
+ * (v0.16.10 behavior); they are NOT in this set.
+ */
+ function expectedSmartDiffTrackedPaths(aiTools, projectType) {
+ const paths = new Set();
+
+ const toolDirs = [];
+ if (aiTools !== 'gemini') toolDirs.push('.claude');
+ if (aiTools !== 'claude') toolDirs.push('.gemini');
+
+ const agents = TEMPLATE_AGENTS.filter((a) => {
+ if (projectType === 'backend' && FRONTEND_AGENTS.includes(a)) return false;
+ if (projectType === 'frontend' && BACKEND_AGENTS.includes(a)) return false;
+ return true;
+ });
+
+ for (const dir of toolDirs) {
+ for (const agent of agents) {
+ paths.add(`${dir}/agents/${agent}`);
+ }
+ }
+
+ paths.add('AGENTS.md');
+
+ return paths;
+ }
+
+ /**
+ * Remove hash entries from `hashes` that are NOT in the expected set
+ * for the current (aiTools, projectType). Returns a new object.
+ *
+ * This does NOT prune based on on-disk presence — a user who temporarily
+ * deletes an agent file keeps its hash so the next upgrade can recreate
+ * the file from the template and restore the hash map cleanly
+ * (Gemini M3 fix).
+ */
+ function pruneExpectedAbsent(hashes, aiTools, projectType) {
+ const expected = expectedSmartDiffTrackedPaths(aiTools, projectType);
+ const pruned = {};
+ for (const [k, v] of Object.entries(hashes)) {
+ if (expected.has(k)) pruned[k] = v;
+ }
+ return pruned;
+ }
+
+ /**
+ * Compute install-time hashes for a newly-populated project. Walks the
+ * expected set and hashes any file that exists on disk, EXCLUDING any
+ * path that's in `excludeSet` (e.g., `--init` encountered a pre-existing
+ * file and skipped it — the user owns that content, we must NOT mark it
+ * as tool-canonical or the next upgrade would overwrite user content.
+ * Codex round 2 P1 fix).
+ *
+ * @param {string} dest - Project root
+ * @param {string} aiTools - 'claude' | 'gemini' | 'both'
+ * @param {string} projectType - 'backend' | 'frontend' | 'fullstack'
+ * @param {Set<string>|Iterable<string>|null} excludeSet - POSIX paths to exclude. Null → no exclusion.
+ */
+ function computeInstallHashes(dest, aiTools, projectType, excludeSet = null) {
+ const excluded = excludeSet ? new Set(excludeSet) : null;
+ const hashes = {};
+ for (const posixPath of expectedSmartDiffTrackedPaths(aiTools, projectType)) {
+ if (excluded && excluded.has(posixPath)) continue;
+ const absPath = path.join(dest, ...posixPath.split('/'));
+ const hash = hashFileOnDisk(absPath);
+ if (hash !== null) {
+ hashes[posixPath] = hash;
+ }
+ }
+ return hashes;
+ }
+
+ module.exports = {
+ META_FILE,
+ CURRENT_SCHEMA_VERSION,
+ computeHash,
+ hashFileOnDisk,
+ toPosix,
+ normalizeForCompare,
+ readMeta,
+ writeMeta,
+ expectedSmartDiffTrackedPaths,
+ pruneExpectedAbsent,
+ computeInstallHashes,
+ };
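
The consumer of this module (upgrade-generator.js) is not included in this diff. As a hedged sketch of how the exports are meant to compose while keeping the documented core invariant (a hash is refreshed only for files the tool actually wrote in the current run), assuming a hypothetical `finishUpgrade` helper and a `writtenFiles` map of the canonical contents the upgrade just wrote:

```js
const { readMeta, writeMeta, computeHash, pruneExpectedAbsent } = require('./meta');

function finishUpgrade(dest, aiTools, projectType, writtenFiles /* Map<posixPath, content> */) {
  const prev = readMeta(dest); // null → start from an empty hash map
  const hashes = pruneExpectedAbsent(prev ? prev.hashes : {}, aiTools, projectType);
  for (const [posixPath, content] of writtenFiles) {
    hashes[posixPath] = computeHash(content); // refresh only what the tool wrote this run
  }
  // Preserved (user-edited) files keep their previous entry untouched.
  writeMeta(dest, hashes);
}
```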