create-sdd-project 0.16.10 → 0.17.1

@@ -8,6 +8,8 @@ const {
  BACKEND_AGENTS,
  } = require('./config');
  const { adaptAgentContentForProjectType } = require('./adapt-agents');
+ const { applyStackAdaptations } = require('./stack-adaptations');
+ const { writeMeta, computeInstallHashes } = require('./meta');

  /**
  * Install SDD DevFlow into an existing project.
@@ -252,6 +254,24 @@ function generateInit(config) {
  const pkg = JSON.parse(fs.readFileSync(path.join(__dirname, '..', 'package.json'), 'utf8'));
  fs.writeFileSync(path.join(dest, '.sdd-version'), pkg.version + '\n', 'utf8');

+ // v0.17.0: write provenance hashes. Computed AFTER all adaptations
+ // so the hashes reflect the exact post-install content on disk. The
+ // next `--upgrade` uses these hashes to answer "did the user edit
+ // this file?" precisely, avoiding cross-version false-positive
+ // preserve warnings (Codex P1 from v0.16.10 plan review).
+ //
+ // Codex round 2 P1 fix: exclude pre-existing files that --init skipped.
+ // Those belong to the user, not to SDD DevFlow, and must NOT be marked
+ // as tool-canonical — otherwise the next upgrade would hash-match the
+ // user's content and silently overwrite it. `skipped` already contains
+ // the relative paths of files that were skipped due to pre-existence;
+ // convert them to POSIX form and pass as the exclude set.
+ const skippedPosix = new Set(skipped.map((p) => p.split('\\').join('/')));
+ writeMeta(
+ dest,
+ computeInstallHashes(dest, config.aiTools, config.projectType, skippedPosix)
+ );
+
  // Done
  console.log(`\nDone! Next steps:`);
  console.log(` git add -A && git commit -m "chore: add SDD DevFlow to existing project"`);
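The upgrade-side consumer of these hashes lives in upgrade-generator.js, which is not part of this diff. As a rough sketch of the documented contract, using only the readMeta and hashFileOnDisk helpers that lib/meta.js (below) actually exports; the decision helper itself is hypothetical:

const path = require('node:path');
const { readMeta, hashFileOnDisk } = require('./meta');

// Hypothetical helper, not in the package: decide whether an upgrade may
// safely replace a tracked file. A hash match means the file is still exactly
// what the tool last wrote (modulo CR/CRLF); a mismatch means the user edited
// it, so the upgrade should preserve it and warn instead.
function userHasEdited(dest, posixPath) {
  const meta = readMeta(dest);
  if (!meta || !meta.hashes[posixPath]) return null; // unknown: fall back to content compare
  const current = hashFileOnDisk(path.join(dest, ...posixPath.split('/')));
  return current !== meta.hashes[posixPath];
}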
@@ -343,163 +363,22 @@ function regexReplaceInFile(filePath, replacements) {
  }

  function adaptCopiedFiles(dest, scan, config) {
- const orm = scan.backend.orm || 'your ORM';
- const db = scan.backend.db || 'your database';
-
- // Common Zod generic validation replacements (all agents + skills)
- // Phase 1: Replace "Zod" with "validation" (most specific first)
- const zodReplacements = [
- ['Zod data schemas', 'validation schemas'],
- ['Zod schemas', 'validation schemas'],
- ];
- // Phase 2: Clean up shared/src/schemas/ path (Zod-specific convention)
- // Applied AFTER phase 1, so these match the post-replacement text
- const schemaPathReplacements = [
- ['validation schemas in `shared/src/schemas/` if applicable', 'validation schemas if applicable'],
- ['validation schemas in `shared/src/schemas/` (if shared workspace exists)', 'validation schemas (if shared workspace exists)'],
- ['validation schemas in `shared/src/schemas/`', 'validation schemas'],
- ['validation schemas (`shared/src/schemas/`)', 'validation schemas'],
- ['`shared/src/schemas/` (if exists) for current validation schemas', 'project validation schemas'],
- // Gemini spec-creator: no "Zod" prefix, standalone path reference
- ['and `shared/src/schemas/` (if exists)', ''],
- ['schemas vs `shared/src/schemas/`', 'validation schemas up to date'],
- ];
-
- // ORM/DB replacements for backend agents
- let ormReplacements = [];
- if (scan.backend.orm && scan.backend.orm !== 'Prisma') {
- ormReplacements = [
- ['Prisma ORM, and PostgreSQL', `${orm}${db !== 'your database' ? `, and ${db}` : ''}`],
- ['Repository implementations (Prisma)', `Repository implementations (${orm})`],
- ];
- } else if (!scan.backend.orm) {
- // No ORM detected — remove Prisma references with generic text
- const dbLabel = db !== 'your database' ? `, and ${db}` : '';
- ormReplacements = [
- ['Prisma ORM, and PostgreSQL', dbLabel ? dbLabel.slice(6) : 'your database'],
- ['Repository implementations (Prisma)', 'Repository implementations'],
- ];
- }
-
- // Apply to all AI tool directories
- const toolDirs = [];
- if (config.aiTools !== 'gemini') toolDirs.push('.claude');
- if (config.aiTools !== 'claude') toolDirs.push('.gemini');
-
- for (const dir of toolDirs) {
- // Backend agents: Zod + ORM replacements
- if (scan.backend.validation !== 'Zod') {
- const backendAgentReplacements = [...zodReplacements, ...ormReplacements];
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, backendAgentReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, backendAgentReplacements);
- // Phase 2: clean up shared/src/schemas/ paths
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, schemaPathReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, schemaPathReplacements);
- } else if (ormReplacements.length > 0) {
- // Zod detected but different ORM — only ORM replacements
- replaceInCopiedFile(dest, `${dir}/agents/backend-developer.md`, ormReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/backend-planner.md`, ormReplacements);
- }
-
- // Multi-purpose agents: Zod replacements only
- if (scan.backend.validation !== 'Zod') {
- const allZodReplacements = [...zodReplacements, ...schemaPathReplacements];
- replaceInCopiedFile(dest, `${dir}/agents/spec-creator.md`, allZodReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/production-code-validator.md`, allZodReplacements);
- replaceInCopiedFile(dest, `${dir}/agents/database-architect.md`, allZodReplacements);
- }
-
- // Skills: Zod + schema path replacements
- if (scan.backend.validation !== 'Zod') {
- const allZodReplacements = [...zodReplacements, ...schemaPathReplacements];
- replaceInCopiedFile(dest, `${dir}/skills/development-workflow/SKILL.md`, allZodReplacements);
- replaceInCopiedFile(dest, `${dir}/skills/development-workflow/references/ticket-template.md`, allZodReplacements);
- }
- }
-
- // Architecture adaptation: DDD-specific content in backend agents
- const arch = scan.srcStructure ? scan.srcStructure.pattern : 'ddd';
- if (arch !== 'ddd') {
- for (const dir of toolDirs) {
- // backend-planner: adapt header, exploration paths, implementation order, rules
- regexReplaceInFile(path.join(dest, dir, 'agents', 'backend-planner.md'), [
- // Header: remove DDD reference (Claude verbose format)
- ['specializing in Domain-Driven Design (DDD) layered architecture with deep knowledge of',
- 'specializing in layered architecture with deep knowledge of'],
- // Header: remove DDD reference (Gemini condensed format)
- ['(DDD architecture)', '(layered architecture)'],
- // Exploration: replace DDD-specific paths with generic (number-agnostic)
- [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
- [/\d+\. Explore existing domain entities, services, validators, repositories\n/,
- '5. Explore the codebase for existing patterns, layer structure, and reusable code\n'],
- [/\d+\. Explore `backend\/src\/infrastructure\/` for existing repositories\n/, ''],
- // Implementation Order (Claude format)
- ['following DDD layer order: Domain > Application > Infrastructure > Presentation > Tests',
- 'following the layer order defined in backend-standards.mdc'],
- // Implementation Order (Gemini format)
- ['Implementation Order (Domain > Application > Infrastructure > Presentation > Tests)',
- 'Implementation Order (see backend-standards.mdc for layer order)'],
- // Rules (Claude format)
- ['Follow DDD layer separation: Domain > Application > Infrastructure > Presentation',
- 'Follow the layer separation defined in backend-standards.mdc'],
- ]);
- // backend-developer: adapt frontmatter, header, exploration, implementation order, rules
- regexReplaceInFile(path.join(dest, dir, 'agents', 'backend-developer.md'), [
- // Frontmatter (Claude format)
- ['follows DDD layered architecture',
- 'follows layered architecture'],
- // Header (Claude format)
- ['specializing in Domain-Driven Design (DDD) with',
- 'specializing in layered architecture with'],
- // Header (Gemini condensed format)
- ['(DDD architecture)', '(layered architecture)'],
- // Exploration: remove shared/src/schemas reference (number-agnostic)
- [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
- // Implementation Order (Claude format): replace DDD layers
- ['Follow the DDD layer order from the plan:',
- 'Follow the layer order from the plan (see backend-standards.mdc for project layers):'],
- [/\d+\. \*\*Domain Layer\*\*: Entities, value objects, repository interfaces, domain errors\n/,
- '1. **Data Layer**: Models, database operations, data access\n'],
- [/\d+\. \*\*Application Layer\*\*: Services, validators, DTOs\n/,
- '2. **Business Logic Layer**: Controllers, services, external integrations\n'],
- [/\d+\. \*\*Infrastructure Layer\*\*: Repository implementations \([^)]*\), external integrations\n/,
- '3. **Presentation Layer**: Routes, handlers, middleware\n'],
- [/\d+\. \*\*Presentation Layer\*\*: Controllers, routes, middleware\n/,
- '4. **Integration Layer**: Wiring, configuration, server registration\n'],
- // Implementation Order (Gemini format)
- ['Follow DDD layer order: Domain > Application > Infrastructure > Presentation.',
- 'Follow the layer order defined in backend-standards.mdc.'],
- // Rules (Claude format)
- ['**ALWAYS** follow DDD layer separation',
- '**ALWAYS** follow the layer separation defined in backend-standards.mdc'],
- ['**ALWAYS** handle errors with custom domain error classes',
- '**ALWAYS** handle errors following the patterns in backend-standards.mdc'],
- // Rules (Gemini format)
- ['ALWAYS handle errors with domain error classes',
- 'ALWAYS handle errors following the patterns in backend-standards.mdc'],
- // Documentation: remove shared/src/schemas mandatory update
- [/- (?:\*\*MANDATORY\*\*: )?If modifying a DB schema → update .* schemas in `shared\/src\/schemas\/` BEFORE continuing\n/, ''],
- ]);
- }
- }
-
- // Agent/skill content: remove frontend/backend-specific references for single-stack projects
+ // v0.17.0: delegate stack-specific adaptations (Zod → validation,
+ // ORM swap, DDD layered, documentation-standards.mdc project-type
+ // pruning) to the shared module in lib/stack-adaptations.js. This
+ // makes the exact same transformation available to upgrade-generator.js
+ // for the hash-based smart-diff replacement path.
+ //
+ // The in-memory variant (applyStackAdaptationsToContent) is also used
+ // by the upgrade fallback path to construct the "what init would have
+ // written" comparison target when .sdd-meta.json is missing (Gemini M1
+ // fix from plan v1.0 cross-model review).
+ applyStackAdaptations(dest, scan, config, null);
+
+ // Remove frontend/backend-specific references for single-stack
+ // projects. Separate from stack adaptations this is project-type
+ // pruning, not stack substitution.
  adaptAgentContentForProjectType(dest, config, regexReplaceInFile);
-
- // documentation-standards.mdc: remove irrelevant rows based on project type
- const docStdPath = path.join(dest, 'ai-specs', 'specs', 'documentation-standards.mdc');
- if (fs.existsSync(docStdPath)) {
- let content = fs.readFileSync(docStdPath, 'utf8');
- if (config.projectType === 'backend') {
- content = content.replace(/\| `ai-specs\/specs\/frontend-standards\.mdc` \|[^\n]*\n/, '');
- content = content.replace(/\| `docs\/specs\/ui-components\.md` \|[^\n]*\n/, '');
- content = content.replace(/ - UI component changes → `docs\/specs\/ui-components\.md`\n/, '');
- } else if (config.projectType === 'frontend') {
- content = content.replace(/\| `ai-specs\/specs\/backend-standards\.mdc` \|[^\n]*\n/, '');
- content = content.replace(/\| `docs\/specs\/api-spec\.yaml` \|[^\n]*\n/, '');
- }
- fs.writeFileSync(docStdPath, content, 'utf8');
- }
  }

  // --- Standards Adaptation ---
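For context, the upgrade fallback mentioned in the comment above would use the in-memory variant roughly as follows. This is a hedged sketch: applyStackAdaptationsToContent's exact signature is not shown in this diff and is assumed here, while normalizedContentEquals is the real helper exported by lib/meta.js.

const { normalizedContentEquals } = require('./meta');
const { applyStackAdaptationsToContent } = require('./stack-adaptations');

// Hypothetical fallback check when .sdd-meta.json is missing: rebuild "what
// init would have written" from the old template and compare it with the file
// on disk. Equal means the user never customized it and the new template may
// replace it; different means the user's version should be preserved.
function safeToReplaceWithoutMeta(onDiskContent, oldTemplateContent, scan, config) {
  const initWouldHaveWritten = applyStackAdaptationsToContent(oldTemplateContent, scan, config);
  return normalizedContentEquals(onDiskContent, initWouldHaveWritten);
}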
@@ -697,10 +576,15 @@ function adaptFrontendStandards(template, scan) {
  );

  // Update Project Structure
+ // v0.17.1: the regex consumes the optional trailing TODO line so idempotent
+ // reapplication doesn't duplicate the marker. Before: on second call the
+ // regex only matched up to the closing ``` and the replacement re-inserted
+ // the TODO, producing two copies. After: the (\n\n<!-- TODO: ... -->)? group
+ // is included in the match so the replacement overwrites it.
  const rootDirs = scan.rootDirs.filter((d) => !['docs/', 'ai-specs/', 'node_modules/'].includes(d));
  const tree = rootDirs.map((d) => `├── ${d.replace(/\/$/, '/')}`).join('\n');
  content = content.replace(
- /## Project Structure\n\n```\n[\s\S]*?```/,
+ /## Project Structure\n\n```\n[\s\S]*?```(\n\n<!-- TODO: Expand the structure above[^\n]*-->)?/,
  `## Project Structure\n\n\`\`\`\nproject/\n${tree}\n\`\`\`\n\n<!-- TODO: Expand the structure above with your key subdirectories. -->`
  );
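A small self-contained check of the idempotence claim (illustrative only; the sample input and tree content are made up):

// Applying the v0.17.1 replacement twice yields the same output, because the
// optional TODO group is part of the match and is rewritten on each pass.
const RE = /## Project Structure\n\n```\n[\s\S]*?```(\n\n<!-- TODO: Expand the structure above[^\n]*-->)?/;
const block =
  '## Project Structure\n\n```\nproject/\n├── src/\n```\n\n' +
  '<!-- TODO: Expand the structure above with your key subdirectories. -->';

const input = 'Intro text.\n\n## Project Structure\n\n```\nold tree\n```\n\nMore text.';
const once = input.replace(RE, block);
const twice = once.replace(RE, block);
console.log(once === twice); // true: no duplicated TODO marker on reapplication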
 
@@ -762,7 +646,14 @@ function adaptAgentsMd(template, config, scan) {
  return norm !== 'docs' && norm !== 'ai-specs';
  });
  if (meaningfulDirs.length > 0) {
- const tree = rootDirs.map((d) => `├── ${d.replace(/\/$/, '/')} `).join('\n');
+ // v0.17.0: use meaningfulDirs (not rootDirs) to build the tree. The
+ // SDD-infrastructure dirs (ai-specs/, docs/) get installed BY init
+ // itself, so at install time they're absent from the scan but at
+ // upgrade time they're present. Using rootDirs would produce
+ // different content in each scan → hash drift → false-positive
+ // preserve warnings on every upgrade. Using meaningfulDirs stabilizes
+ // the output across install and upgrade for the same project.
+ const tree = meaningfulDirs.map((d) => `├── ${d.replace(/\/$/, '/')} `).join('\n');
  const treeBlock = `\`\`\`\nproject/\n${tree}\n└── docs/ ← Documentation\n\`\`\``;
  content = content.replace(
  /<!-- CONFIG: Adjust directories[^>]*-->\n+```\nproject\/\n[\s\S]*?```/,
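To make the drift concrete, a minimal sketch (directory names are illustrative, not from the package):

// The same project scanned at install time (before ai-specs/ and docs/ exist)
// and at upgrade time (after they exist) yields the same filtered list, so the
// generated AGENTS.md tree, and therefore its sha256 entry in .sdd-meta.json,
// stays stable across runs.
const atInstall = ['backend/', 'frontend/'];
const atUpgrade = ['backend/', 'frontend/', 'ai-specs/', 'docs/'];
const meaningful = (dirs) => dirs.filter((d) => !['docs/', 'ai-specs/'].includes(d));
console.log(meaningful(atInstall).join(',') === meaningful(atUpgrade).join(',')); // true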
@@ -1069,9 +960,25 @@ function appendGitignore(dest, skipped) {
  const sddEntries = '\n# SDD DevFlow\ndocs/tickets/*.md\n!docs/tickets/.gitkeep\n';

  if (fs.existsSync(gitignorePath)) {
- const content = fs.readFileSync(gitignorePath, 'utf8');
+ let content = fs.readFileSync(gitignorePath, 'utf8');
+ let appended = false;
  if (!content.includes('SDD DevFlow')) {
- fs.appendFileSync(gitignorePath, sddEntries, 'utf8');
+ content = content.trimEnd() + sddEntries;
+ appended = true;
+ }
+ // v0.17.0: ensure .sdd-meta.json + .sdd-backup/ are ignored (local-only
+ // metadata). Codex round 2 P2 fix — --init must also append these
+ // entries idempotently, matching the upgrade path.
+ if (!/^\s*\/?\.sdd-backup\/?\s*$/m.test(content)) {
+ content = content.trimEnd() + '\n\n# sdd-devflow upgrade backups (ignored — kept locally for recovery only)\n.sdd-backup/\n';
+ appended = true;
+ }
+ if (!/^\s*\/?\.sdd-meta\.json\s*$/m.test(content)) {
+ content = content.trimEnd() + '\n\n# sdd-devflow provenance tracking (local-only, content-addressable hashes)\n.sdd-meta.json\n';
+ appended = true;
+ }
+ if (appended) {
+ fs.writeFileSync(gitignorePath, content, 'utf8');
  step('Appended SDD entries to .gitignore');
  }
  } else {
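For reference, a pre-existing .gitignore that contained none of these entries ends up with the following tail appended (derived from the string literals above, each block separated by one blank line because of the trimEnd calls):

# SDD DevFlow
docs/tickets/*.md
!docs/tickets/.gitkeep

# sdd-devflow upgrade backups (ignored — kept locally for recovery only)
.sdd-backup/

# sdd-devflow provenance tracking (local-only, content-addressable hashes)
.sdd-meta.json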
package/lib/meta.js ADDED
@@ -0,0 +1,344 @@
+ 'use strict';
+
+ /**
+ * SDD DevFlow provenance tracking — v0.17.0+
+ *
+ * The `.sdd-meta.json` file stores content-addressable hashes of files the
+ * tool considers "canonically tool-owned" (template agents + AGENTS.md in
+ * v0.17.0). The upgrade path uses these hashes to answer the question
+ * "has the user edited this file since the last time the tool wrote it?"
+ * precisely, without comparing against the new template's adapted output
+ * (which drifts across versions and causes false-positive preserve
+ * warnings on cross-version upgrades — the Codex P1 finding from the
+ * v0.16.10 cross-model review).
+ *
+ * Core invariant (Codex M1 from plan v1.0 review): a hash in this file
+ * represents "the last time the tool wrote this file, the content hashed
+ * to X". Hashes are ONLY written/updated when the tool actually wrote
+ * canonical output to a file in the current run (replaced, new, or
+ * --force-template paths). Preserved files leave their hash entry
+ * untouched — otherwise the user's customized content would be hashed and
+ * silently overwritten on the next upgrade.
+ *
+ * File format (schemaVersion: 1):
+ * {
+ * "schemaVersion": 1,
+ * "hashes": {
+ * ".claude/agents/backend-planner.md": "sha256:abc...",
+ * "AGENTS.md": "sha256:def...",
+ * ...
+ * }
+ * }
+ *
+ * Path keys are POSIX-normalized (forward slashes) on ALL platforms so
+ * lookups work consistently on Windows where path.join would otherwise
+ * produce backslashes (Gemini M2 fix).
+ */
+
+ const fs = require('node:fs');
+ const path = require('node:path');
+ const { createHash } = require('node:crypto');
+
+ const { FRONTEND_AGENTS, BACKEND_AGENTS, TEMPLATE_AGENTS } = require('./config');
+
+ const META_FILE = '.sdd-meta.json';
+ const CURRENT_SCHEMA_VERSION = 1;
+
+ /**
+ * Normalize text for content-addressable hashing.
+ *
+ * v0.17.0: only strip CR / CRLF line endings (Windows git core.autocrlf
+ * compatibility). Do NOT strip trailing whitespace per line — that would
+ * destroy markdown hard-breaks (two trailing spaces render as <br>) and
+ * silently wipe user customizations that only touched whitespace
+ * (Gemini M2 fix).
+ *
+ * Trade-off: editors configured to "trim trailing whitespace on save"
+ * (e.g. VSCode files.trimTrailingWhitespace=true) will produce a hash
+ * mismatch even without semantic edits. This is a conservative false
+ * positive — the upgrade preserves the file and the user can re-run
+ * with --force-template to accept the new template content. The
+ * alternative (silent wipe of markdown hard-breaks) is strictly worse.
+ */
+ function normalizeForCompare(text) {
+ return text.replace(/\r\n/g, '\n').replace(/\r/g, '\n');
+ }
+
+ /**
+ * v0.17.1: file-agnostic content equality helper. Returns true when two
+ * strings are equal after CR/CRLF normalization. Replaces the v0.17.0
+ * `isStandardModified` function, which was standards-specific in name but
+ * identical in logic after the Gemini M2 normalization fix.
+ *
+ * Rename rationale (round-2 review consolidated): Codex round-1 Q9 wanted
+ * `isStandardModified` deleted; Gemini round-1 Q9 wanted a named helper
+ * for fallback-path readability; Codex round-2 Q11 pointed out that the
+ * kept-name-as-wrapper compromise was vestigial because the name still
+ * implied standards-specific policy. This rename makes the helper's
+ * file-agnostic nature explicit and its location (next to its dependency
+ * `normalizeForCompare`) structural.
+ *
+ * Scope of normalization (important, often misstated in earlier plan
+ * drafts): this helper ONLY normalizes CR/CRLF line endings. It does NOT
+ * strip trailing whitespace per line nor leading/trailing blank lines —
+ * that would destroy markdown hard-breaks (two trailing spaces render as
+ * `<br>`) and silently wipe user customizations that only touched
+ * whitespace. The conservative behavior is intentional; the alternative
+ * is strictly worse.
+ */
+ function normalizedContentEquals(a, b) {
+ return normalizeForCompare(a) === normalizeForCompare(b);
+ }
+
+ /**
+ * Compute the content-addressable hash of a string.
+ *
+ * Returns 'sha256:<hex>'. The prefix is mandatory so v0.17.x can
+ * introduce additional algorithms (e.g. 'blake3:...') without breaking
+ * old readers — equality comparison on the full string handles upgrades
+ * naturally.
+ */
+ function computeHash(content) {
+ const digest = createHash('sha256').update(normalizeForCompare(content), 'utf8').digest('hex');
+ return `sha256:${digest}`;
+ }
+
+ /**
+ * Compute the hash of a file on disk, or null if it doesn't exist.
+ * Reads as UTF-8 (all tracked files are text).
+ */
+ function hashFileOnDisk(absPath) {
+ if (!fs.existsSync(absPath)) return null;
+ try {
+ return computeHash(fs.readFileSync(absPath, 'utf8'));
+ } catch {
+ return null;
+ }
+ }
+
+ /**
+ * Normalize a platform-relative path to POSIX form for use as a hash map
+ * key. On Windows this converts backslashes to forward slashes; on POSIX
+ * it's a no-op.
+ */
+ function toPosix(relativePath) {
+ return relativePath.split(path.sep).join('/');
+ }
+
+ /**
+ * Read and validate .sdd-meta.json. Returns null on ANY read/parse/shape
+ * failure so callers can fall back to v0.16.10 content-compare behavior.
+ * Never throws.
+ *
+ * Returns { schemaVersion, hashes } on success.
+ */
+ function readMeta(dest) {
+ const p = path.join(dest, META_FILE);
+ if (!fs.existsSync(p)) return null;
+
+ let raw;
+ try {
+ raw = fs.readFileSync(p, 'utf8');
+ } catch (e) {
+ console.warn(` ⚠ .sdd-meta.json unreadable (${e.code || e.message}). Falling back to content compare.`);
+ return null;
+ }
+
+ let parsed;
+ try {
+ parsed = JSON.parse(raw);
+ } catch (e) {
+ console.warn(` ⚠ .sdd-meta.json is not valid JSON (${e.message}). Falling back to content compare.`);
+ return null;
+ }
+
+ if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
+ console.warn(` ⚠ .sdd-meta.json root is not an object. Falling back.`);
+ return null;
+ }
+
+ // Schema version: absent → assume v1 (forward-compat with writers that
+ // might omit the field). Greater than current → log a warning and fall
+ // back (don't try to interpret future schemas).
+ const schemaVersion = parsed.schemaVersion ?? 1;
+ if (typeof schemaVersion !== 'number' || schemaVersion < 1) {
+ console.warn(` ⚠ .sdd-meta.json has invalid schemaVersion ${schemaVersion}. Falling back.`);
+ return null;
+ }
+ if (schemaVersion > CURRENT_SCHEMA_VERSION) {
+ console.warn(
+ ` ⚠ .sdd-meta.json schemaVersion ${schemaVersion} is newer than supported ${CURRENT_SCHEMA_VERSION}. ` +
+ `Falling back to content compare. Upgrade the sdd-devflow CLI to a newer version.`
+ );
+ return null;
+ }
+
+ const hashes = parsed.hashes;
+ if (typeof hashes !== 'object' || hashes === null || Array.isArray(hashes)) {
+ console.warn(` ⚠ .sdd-meta.json hashes field is not an object. Falling back.`);
+ return null;
+ }
+
+ // Shallow-validate each entry: key is a string, value matches the
+ // sha256:<hex> shape. Malformed entries are dropped silently (they'll
+ // be recomputed on the next upgrade).
+ const cleaned = {};
+ const HASH_RE = /^sha256:[0-9a-f]{64}$/;
+ for (const [k, v] of Object.entries(hashes)) {
+ if (typeof k !== 'string' || typeof v !== 'string') continue;
+ if (!HASH_RE.test(v)) continue;
+ // Normalize keys to POSIX in case an older writer produced
+ // backslashed paths on Windows.
+ cleaned[k.split('\\').join('/')] = v;
+ }
+
+ return { schemaVersion, hashes: cleaned };
+ }
+
+ /**
+ * Write .sdd-meta.json with the given hashes map. Non-fatal on failure —
+ * logs a warning but does NOT throw. The next upgrade will recompute and
+ * try again.
+ */
+ function writeMeta(dest, hashes) {
+ const p = path.join(dest, META_FILE);
+ const payload = {
+ schemaVersion: CURRENT_SCHEMA_VERSION,
+ hashes,
+ };
+ try {
+ fs.writeFileSync(p, JSON.stringify(payload, null, 2) + '\n', 'utf8');
+ } catch (e) {
+ console.warn(
+ ` ⚠ Failed to write .sdd-meta.json: ${e.code || e.message}. ` +
+ `Next upgrade will fall back to content compare.`
+ );
+ }
+ }
+
+ /**
+ * Compute the full set of POSIX paths that SHOULD have a hash entry for
+ * the given (aiTools, projectType) combination. Used for two purposes:
+ *
+ * 1. Pruning: remove hash entries for files that are expected-absent
+ * (e.g., single-stack project removed frontend agents) — but NOT for
+ * files the user temporarily deleted manually (those get recreated
+ * on the next upgrade, so their hash should persist).
+ *
+ * 2. Install-time hashing: iterate this set, compute each file's hash
+ * if the file exists on disk.
+ *
+ * v0.17.0 scope: template agents (.claude/agents/*, .gemini/agents/*)
+ * + AGENTS.md.
+ *
+ * v0.17.1 scope extension (Codex M1 option 2 deferred from v0.17.0):
+ * - 4 standards files (ai-specs/specs/*.mdc) — always tracked
+ * - 6 workflow-core files (development-workflow SKILL.md + ticket-template.md
+ * + merge-checklist.md, × 2 tools) — filtered by aiTools
+ *
+ * Out of scope for v0.17.1 (deferred to v0.17.2): bug-workflow/SKILL.md,
+ * health-check/SKILL.md, pm-orchestrator/SKILL.md, project-memory/SKILL.md,
+ * and all references/ files except the 3 development-workflow ones above.
+ */
+ function expectedSmartDiffTrackedPaths(aiTools, projectType) {
+ const paths = new Set();
+
+ const toolDirs = [];
+ if (aiTools !== 'gemini') toolDirs.push('.claude');
+ if (aiTools !== 'claude') toolDirs.push('.gemini');
+
+ const agents = TEMPLATE_AGENTS.filter((a) => {
+ if (projectType === 'backend' && FRONTEND_AGENTS.includes(a)) return false;
+ if (projectType === 'frontend' && BACKEND_AGENTS.includes(a)) return false;
+ return true;
+ });
+
+ for (const dir of toolDirs) {
+ for (const agent of agents) {
+ paths.add(`${dir}/agents/${agent}`);
+ }
+ }
+
+ paths.add('AGENTS.md');
+
+ // v0.17.1: standards — always tracked (independent of aiTools/projectType).
+ // Project-type filtering (backend-only skips frontend-standards.mdc) is
+ // enforced by the existing install/upgrade pipeline that only writes the
+ // relevant standards for the project type; their hash entries are simply
+ // absent for the non-applicable side.
+ paths.add('ai-specs/specs/base-standards.mdc');
+ if (projectType !== 'frontend') paths.add('ai-specs/specs/backend-standards.mdc');
+ if (projectType !== 'backend') paths.add('ai-specs/specs/frontend-standards.mdc');
+ paths.add('ai-specs/specs/documentation-standards.mdc');
+
+ // v0.17.1: development-workflow skill core files — filtered by aiTools.
+ // bug-workflow, health-check, pm-orchestrator, project-memory are OUT OF
+ // SCOPE for v0.17.1 (deferred to v0.17.2).
+ for (const dir of toolDirs) {
+ paths.add(`${dir}/skills/development-workflow/SKILL.md`);
+ paths.add(`${dir}/skills/development-workflow/references/ticket-template.md`);
+ paths.add(`${dir}/skills/development-workflow/references/merge-checklist.md`);
+ }
+
+ return paths;
+ }
+
+ /**
+ * Remove hash entries from `hashes` that are NOT in the expected set
+ * for the current (aiTools, projectType). Returns a new object.
+ *
+ * This does NOT prune based on on-disk presence — a user who temporarily
+ * deletes an agent file keeps its hash so the next upgrade can recreate
+ * the file from the template and restore the hash map cleanly
+ * (Gemini M3 fix).
+ */
+ function pruneExpectedAbsent(hashes, aiTools, projectType) {
+ const expected = expectedSmartDiffTrackedPaths(aiTools, projectType);
+ const pruned = {};
+ for (const [k, v] of Object.entries(hashes)) {
+ if (expected.has(k)) pruned[k] = v;
+ }
+ return pruned;
+ }
+
+ /**
+ * Compute install-time hashes for a newly-populated project. Walks the
+ * expected set and hashes any file that exists on disk, EXCLUDING any
+ * path that's in `excludeSet` (e.g., `--init` encountered a pre-existing
+ * file and skipped it — the user owns that content, we must NOT mark it
+ * as tool-canonical or the next upgrade would overwrite user content.
+ * Codex round 2 P1 fix).
+ *
+ * @param {string} dest - Project root
+ * @param {string} aiTools - 'claude' | 'gemini' | 'both'
+ * @param {string} projectType - 'backend' | 'frontend' | 'fullstack'
+ * @param {Set<string>|Iterable<string>|null} excludeSet - POSIX paths to exclude. Null → no exclusion.
+ */
+ function computeInstallHashes(dest, aiTools, projectType, excludeSet = null) {
+ const excluded = excludeSet ? new Set(excludeSet) : null;
+ const hashes = {};
+ for (const posixPath of expectedSmartDiffTrackedPaths(aiTools, projectType)) {
+ if (excluded && excluded.has(posixPath)) continue;
+ const absPath = path.join(dest, ...posixPath.split('/'));
+ const hash = hashFileOnDisk(absPath);
+ if (hash !== null) {
+ hashes[posixPath] = hash;
+ }
+ }
+ return hashes;
+ }
+
+ module.exports = {
+ META_FILE,
+ CURRENT_SCHEMA_VERSION,
+ computeHash,
+ hashFileOnDisk,
+ toPosix,
+ normalizeForCompare,
+ normalizedContentEquals,
+ readMeta,
+ writeMeta,
+ expectedSmartDiffTrackedPaths,
+ pruneExpectedAbsent,
+ computeInstallHashes,
+ };
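A short illustrative check (not part of the package) of the normalization trade-off documented in normalizeForCompare: line-ending differences do not change the hash, while trailing-whitespace differences do.

const { computeHash } = require('./meta');

const unixEndings = 'line one\nhard break  \nline two\n';
const windowsEndings = 'line one\r\nhard break  \r\nline two\r\n';
const whitespaceTrimmed = 'line one\nhard break\nline two\n';

console.log(computeHash(unixEndings) === computeHash(windowsEndings));    // true: CR/CRLF is normalized away
console.log(computeHash(unixEndings) === computeHash(whitespaceTrimmed)); // false: markdown hard-breaks are preserved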