create-sdd-project 0.16.10 → 0.17.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/lib/doctor.js +148 -0
- package/lib/generator.js +8 -0
- package/lib/init-generator.js +61 -159
- package/lib/meta.js +291 -0
- package/lib/stack-adaptations.js +335 -0
- package/lib/upgrade-generator.js +264 -95
- package/package.json +1 -1
- package/template/gitignore +3 -0
package/lib/meta.js
ADDED
|
@@ -0,0 +1,291 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* SDD DevFlow provenance tracking — v0.17.0+
|
|
5
|
+
*
|
|
6
|
+
* The `.sdd-meta.json` file stores content-addressable hashes of files the
|
|
7
|
+
* tool considers "canonically tool-owned" (template agents + AGENTS.md in
|
|
8
|
+
* v0.17.0). The upgrade path uses these hashes to answer the question
|
|
9
|
+
* "has the user edited this file since the last time the tool wrote it?"
|
|
10
|
+
* precisely, without comparing against the new template's adapted output
|
|
11
|
+
* (which drifts across versions and causes false-positive preserve
|
|
12
|
+
* warnings on cross-version upgrades — the Codex P1 finding from the
|
|
13
|
+
* v0.16.10 cross-model review).
|
|
14
|
+
*
|
|
15
|
+
* Core invariant (Codex M1 from plan v1.0 review): a hash in this file
|
|
16
|
+
* represents "the last time the tool wrote this file, the content hashed
|
|
17
|
+
* to X". Hashes are ONLY written/updated when the tool actually wrote
|
|
18
|
+
* canonical output to a file in the current run (replaced, new, or
|
|
19
|
+
* --force-template paths). Preserved files leave their hash entry
|
|
20
|
+
* untouched — otherwise the user's customized content would be hashed and
|
|
21
|
+
* silently overwritten on the next upgrade.
|
|
22
|
+
*
|
|
23
|
+
* File format (schemaVersion: 1):
|
|
24
|
+
* {
|
|
25
|
+
* "schemaVersion": 1,
|
|
26
|
+
* "hashes": {
|
|
27
|
+
* ".claude/agents/backend-planner.md": "sha256:abc...",
|
|
28
|
+
* "AGENTS.md": "sha256:def...",
|
|
29
|
+
* ...
|
|
30
|
+
* }
|
|
31
|
+
* }
|
|
32
|
+
*
|
|
33
|
+
* Path keys are POSIX-normalized (forward slashes) on ALL platforms so
|
|
34
|
+
* lookups work consistently on Windows where path.join would otherwise
|
|
35
|
+
* produce backslashes (Gemini M2 fix).
|
|
36
|
+
*/
|
|
37
|
+
|
|
38
|
+
const fs = require('node:fs');
|
|
39
|
+
const path = require('node:path');
|
|
40
|
+
const { createHash } = require('node:crypto');
|
|
41
|
+
|
|
42
|
+
const { FRONTEND_AGENTS, BACKEND_AGENTS, TEMPLATE_AGENTS } = require('./config');
|
|
43
|
+
|
|
44
|
+
// Filename of the provenance-tracking metadata file at the project root.
const META_FILE = '.sdd-meta.json';
// Highest schemaVersion this module can read/write; bump on format changes.
const CURRENT_SCHEMA_VERSION = 1;
|
|
46
|
+
|
|
47
|
+
/**
 * Normalize text prior to content-addressable hashing.
 *
 * v0.17.0: only line-ending normalization (CRLF and bare CR become LF, for
 * Windows git core.autocrlf compatibility). Trailing whitespace per line is
 * deliberately NOT stripped — stripping it would destroy markdown
 * hard-breaks (two trailing spaces render as <br>) and silently wipe
 * user customizations that only touched whitespace (Gemini M2 fix).
 *
 * Trade-off: editors that trim trailing whitespace on save (e.g. VSCode
 * files.trimTrailingWhitespace=true) produce a hash mismatch with no
 * semantic edit. That is a conservative false positive — the upgrade
 * preserves the file and --force-template accepts the new template. The
 * alternative (silently wiping markdown hard-breaks) is strictly worse.
 *
 * @param {string} text
 * @returns {string} text with all line endings converted to '\n'
 */
function normalizeForCompare(text) {
  // One pass handles both cases: "\r\n?" greedily matches CRLF first,
  // and falls back to a lone CR when no LF follows.
  return text.replace(/\r\n?/g, '\n');
}
|
|
66
|
+
|
|
67
|
+
/**
 * Compute the content-addressable hash of a string.
 *
 * Returns 'sha256:<hex>'. The algorithm prefix is mandatory so future
 * versions can introduce other algorithms (e.g. 'blake3:...') without
 * breaking old readers — full-string equality handles the transition
 * naturally.
 *
 * @param {string} content - Raw file content (line endings are normalized
 *   via normalizeForCompare before hashing).
 * @returns {string} 'sha256:' followed by 64 lowercase hex digits
 */
function computeHash(content) {
  const hasher = createHash('sha256');
  hasher.update(normalizeForCompare(content), 'utf8');
  return `sha256:${hasher.digest('hex')}`;
}
|
|
79
|
+
|
|
80
|
+
/**
 * Compute the hash of a file on disk, or null if it doesn't exist or
 * cannot be read. Files are read as UTF-8 (all tracked files are text).
 *
 * @param {string} absPath - Absolute path of the file to hash.
 * @returns {string|null} 'sha256:<hex>' or null
 */
function hashFileOnDisk(absPath) {
  if (!fs.existsSync(absPath)) return null;
  let content;
  try {
    content = fs.readFileSync(absPath, 'utf8');
  } catch {
    // Race with deletion or a permission problem — treat as absent.
    return null;
  }
  return computeHash(content);
}
|
|
92
|
+
|
|
93
|
+
/**
 * Normalize a platform-relative path to POSIX form for use as a hash map
 * key. On Windows this converts backslashes to forward slashes; on POSIX
 * platforms it's a no-op.
 *
 * @param {string} relativePath - Platform-native relative path.
 * @returns {string} POSIX (forward-slash) form of the path
 */
function toPosix(relativePath) {
  if (path.sep === '/') return relativePath;
  return relativePath.replaceAll(path.sep, '/');
}
|
|
101
|
+
|
|
102
|
+
/**
 * Read and validate .sdd-meta.json. Returns null on ANY read/parse/shape
 * failure so callers can fall back to v0.16.10 content-compare behavior.
 * Never throws.
 *
 * @param {string} dest - Project root directory.
 * @returns {{schemaVersion: number, hashes: Object<string, string>}|null}
 *   Validated meta (hashes keyed by POSIX-normalized paths), or null when
 *   the file is absent, unreadable, or malformed.
 */
function readMeta(dest) {
  const p = path.join(dest, META_FILE);
  if (!fs.existsSync(p)) return null;

  let raw;
  try {
    raw = fs.readFileSync(p, 'utf8');
  } catch (e) {
    console.warn(` ⚠ .sdd-meta.json unreadable (${e.code || e.message}). Falling back to content compare.`);
    return null;
  }

  let parsed;
  try {
    parsed = JSON.parse(raw);
  } catch (e) {
    console.warn(` ⚠ .sdd-meta.json is not valid JSON (${e.message}). Falling back to content compare.`);
    return null;
  }

  if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
    console.warn(` ⚠ .sdd-meta.json root is not an object. Falling back.`);
    return null;
  }

  // Schema version: absent → assume v1 (forward-compat with writers that
  // might omit the field). Greater than current → log a warning and fall
  // back (don't try to interpret future schemas).
  const schemaVersion = parsed.schemaVersion ?? 1;
  // Number.isInteger also rejects NaN and fractional values; the previous
  // `typeof === 'number' || < 1` check silently accepted NaN because NaN
  // compares false against both bounds.
  if (!Number.isInteger(schemaVersion) || schemaVersion < 1) {
    console.warn(` ⚠ .sdd-meta.json has invalid schemaVersion ${schemaVersion}. Falling back.`);
    return null;
  }
  if (schemaVersion > CURRENT_SCHEMA_VERSION) {
    console.warn(
      ` ⚠ .sdd-meta.json schemaVersion ${schemaVersion} is newer than supported ${CURRENT_SCHEMA_VERSION}. ` +
      `Falling back to content compare. Upgrade the sdd-devflow CLI to a newer version.`
    );
    return null;
  }

  const hashes = parsed.hashes;
  if (typeof hashes !== 'object' || hashes === null || Array.isArray(hashes)) {
    console.warn(` ⚠ .sdd-meta.json hashes field is not an object. Falling back.`);
    return null;
  }

  // Shallow-validate each entry: key is a string, value matches the
  // sha256:<hex> shape. Malformed entries are dropped silently (they'll
  // be recomputed on the next upgrade).
  const cleaned = {};
  const HASH_RE = /^sha256:[0-9a-f]{64}$/;
  for (const [k, v] of Object.entries(hashes)) {
    if (typeof k !== 'string' || typeof v !== 'string') continue;
    if (!HASH_RE.test(v)) continue;
    // Normalize keys to POSIX in case an older writer produced
    // backslashed paths on Windows.
    cleaned[k.split('\\').join('/')] = v;
  }

  return { schemaVersion, hashes: cleaned };
}
|
|
171
|
+
|
|
172
|
+
/**
 * Write .sdd-meta.json with the given hashes map. Non-fatal on failure —
 * logs a warning but does NOT throw. The next upgrade will recompute and
 * try again.
 *
 * @param {string} dest - Project root directory.
 * @param {Object<string, string>} hashes - POSIX path → 'sha256:<hex>' map.
 */
function writeMeta(dest, hashes) {
  const metaPath = path.join(dest, META_FILE);
  // Pretty-print with a trailing newline so the file diffs cleanly in git.
  const serialized = `${JSON.stringify({ schemaVersion: CURRENT_SCHEMA_VERSION, hashes }, null, 2)}\n`;
  try {
    fs.writeFileSync(metaPath, serialized, 'utf8');
  } catch (e) {
    console.warn(
      ` ⚠ Failed to write .sdd-meta.json: ${e.code || e.message}. ` +
      `Next upgrade will fall back to content compare.`
    );
  }
}
|
|
192
|
+
|
|
193
|
+
/**
 * Compute the full set of POSIX paths that SHOULD have a hash entry for
 * the given (aiTools, projectType) combination. Used for two purposes:
 *
 * 1. Pruning: remove hash entries for files that are expected-absent
 *    (e.g., single-stack project removed frontend agents) — but NOT for
 *    files the user temporarily deleted manually (those get recreated
 *    on the next upgrade, so their hash should persist).
 *
 * 2. Install-time hashing: iterate this set, compute each file's hash
 *    if the file exists on disk.
 *
 * v0.17.0 scope: template agents (.claude/agents/*, .gemini/agents/*)
 * + AGENTS.md. SKILL.md / ticket-template.md / documentation-standards.mdc
 * are tracked with wholesale-recopy + stack-adaptations on every upgrade
 * (v0.16.10 behavior); they are NOT in this set.
 *
 * @param {string} aiTools - 'claude' | 'gemini' | 'both'
 * @param {string} projectType - 'backend' | 'frontend' | 'fullstack'
 * @returns {Set<string>} POSIX relative paths
 */
function expectedSmartDiffTrackedPaths(aiTools, projectType) {
  const tracked = new Set();

  const toolDirs = [];
  if (aiTools !== 'gemini') toolDirs.push('.claude');
  if (aiTools !== 'claude') toolDirs.push('.gemini');

  // Single-stack projects don't track the other stack's agents.
  const isExcluded = (agent) =>
    (projectType === 'backend' && FRONTEND_AGENTS.includes(agent)) ||
    (projectType === 'frontend' && BACKEND_AGENTS.includes(agent));

  for (const dir of toolDirs) {
    for (const agent of TEMPLATE_AGENTS) {
      if (isExcluded(agent)) continue;
      tracked.add(`${dir}/agents/${agent}`);
    }
  }

  tracked.add('AGENTS.md');

  return tracked;
}
|
|
233
|
+
|
|
234
|
+
/**
 * Remove hash entries from `hashes` that are NOT in the expected set
 * for the current (aiTools, projectType). Returns a new object; the
 * input is never mutated.
 *
 * This does NOT prune based on on-disk presence — a user who temporarily
 * deletes an agent file keeps its hash so the next upgrade can recreate
 * the file from the template and restore the hash map cleanly
 * (Gemini M3 fix).
 *
 * @param {Object<string, string>} hashes
 * @param {string} aiTools
 * @param {string} projectType
 * @returns {Object<string, string>} pruned copy of `hashes`
 */
function pruneExpectedAbsent(hashes, aiTools, projectType) {
  const expected = expectedSmartDiffTrackedPaths(aiTools, projectType);
  return Object.fromEntries(
    Object.entries(hashes).filter(([posixPath]) => expected.has(posixPath))
  );
}
|
|
251
|
+
|
|
252
|
+
/**
 * Compute install-time hashes for a newly-populated project. Walks the
 * expected set and hashes any file that exists on disk, EXCLUDING any
 * path that's in `excludeSet` (e.g., `--init` encountered a pre-existing
 * file and skipped it — the user owns that content, we must NOT mark it
 * as tool-canonical or the next upgrade would overwrite user content.
 * Codex round 2 P1 fix).
 *
 * @param {string} dest - Project root
 * @param {string} aiTools - 'claude' | 'gemini' | 'both'
 * @param {string} projectType - 'backend' | 'frontend' | 'fullstack'
 * @param {Set<string>|Iterable<string>|null} excludeSet - POSIX paths to exclude. Null → no exclusion.
 * @returns {Object<string, string>} POSIX path → 'sha256:<hex>' for every tracked file found on disk
 */
function computeInstallHashes(dest, aiTools, projectType, excludeSet = null) {
  const excluded = excludeSet ? new Set(excludeSet) : null;
  const hashes = {};
  for (const posixPath of expectedSmartDiffTrackedPaths(aiTools, projectType)) {
    if (excluded?.has(posixPath)) continue;
    // Re-split on '/' so path.join produces native separators on Windows.
    const digest = hashFileOnDisk(path.join(dest, ...posixPath.split('/')));
    if (digest !== null) {
      hashes[posixPath] = digest;
    }
  }
  return hashes;
}
|
|
278
|
+
|
|
279
|
+
// Public API: hashing primitives, .sdd-meta.json I/O, and tracked-path
// helpers consumed by init-generator.js and upgrade-generator.js.
module.exports = {
  META_FILE,
  CURRENT_SCHEMA_VERSION,
  computeHash,
  hashFileOnDisk,
  toPosix,
  normalizeForCompare,
  readMeta,
  writeMeta,
  expectedSmartDiffTrackedPaths,
  pruneExpectedAbsent,
  computeInstallHashes,
};
|
|
@@ -0,0 +1,335 @@
|
|
|
1
|
+
'use strict';
|
|
2
|
+
|
|
3
|
+
/**
|
|
4
|
+
* SDD DevFlow stack-specific adaptations — shared module (v0.17.0+).
|
|
5
|
+
*
|
|
6
|
+
* Extracted from lib/init-generator.js `adaptCopiedFiles` in v0.17.0 so
|
|
7
|
+
* the upgrade path can re-apply the same transformations after a
|
|
8
|
+
* hash-based smart-diff replacement. Previously init-generator.js ran
|
|
9
|
+
* these adaptations on install but upgrade-generator.js did not, so an
|
|
10
|
+
* init'd project upgrading would lose its stack customizations — the
|
|
11
|
+
* cross-path drift discovered during v0.16.10 implementation.
|
|
12
|
+
*
|
|
13
|
+
* Public API:
|
|
14
|
+
*
|
|
15
|
+
* applyStackAdaptations(dest, scan, config, allowlist = null)
|
|
16
|
+
* → walks the filesystem, applies adaptation rules to each file in
|
|
17
|
+
* the candidate set, respects the allowlist (upgrade path uses
|
|
18
|
+
* this to avoid touching preserved user-edited files). Returns the
|
|
19
|
+
* list of POSIX relative paths that were touched.
|
|
20
|
+
*
|
|
21
|
+
* applyStackAdaptationsToContent(content, posixRelativePath, scan, config)
|
|
22
|
+
* → pure, in-memory variant. Returns the adapted content for a
|
|
23
|
+
* single file. Used by upgrade-generator.js's FALLBACK path
|
|
24
|
+
* (when .sdd-meta.json is missing) to construct the "what init
|
|
25
|
+
* would have written" comparison target. This is critical for
|
|
26
|
+
* pre-v0.17.0 --init projects on their first v0.17.0 upgrade
|
|
27
|
+
* (Gemini M1 fix from plan v1.0 review).
|
|
28
|
+
*
|
|
29
|
+
* Idempotency invariant: every rule's source pattern MUST NOT appear in
|
|
30
|
+
* its own replacement value. The current rules satisfy this because they
|
|
31
|
+
* replace literal template strings like "Prisma ORM, and PostgreSQL"
|
|
32
|
+
* with "Mongoose, and MongoDB" — the source no longer appears after one
|
|
33
|
+
* pass. Verified by smoke scenario 56 (run every rule twice, assert
|
|
34
|
+
* second application is a no-op).
|
|
35
|
+
*
|
|
36
|
+
* Ordering: some rules run in phases. Phase 1 ("Zod data schemas" →
|
|
37
|
+
* "validation schemas") MUST run before phase 2 ("validation schemas in
|
|
38
|
+
* `shared/src/schemas/`" → "validation schemas") because phase 2's
|
|
39
|
+
* source depends on phase 1's replacement having happened. The rule
|
|
40
|
+
* arrays preserve this ordering; callers must apply them in sequence
|
|
41
|
+
* per file.
|
|
42
|
+
*/
|
|
43
|
+
|
|
44
|
+
const fs = require('node:fs');
|
|
45
|
+
const path = require('node:path');
|
|
46
|
+
|
|
47
|
+
const { toPosix } = require('./meta');
|
|
48
|
+
|
|
49
|
+
/**
 * Compute the ordered list of [from, to] replacement rules for a given
 * (file, scan, config). Rules are pure data — no filesystem access.
 *
 * Returns null if this file has no adaptations for the given project
 * state (e.g., a Zod project's backend-developer.md needs no Zod
 * substitutions).
 *
 * The rules here mirror the imperative body of the original
 * lib/init-generator.js adaptCopiedFiles function. Extracting them into
 * a data-driven table allows both file-based and in-memory application.
 *
 * Ordering matters: zodReplacements must precede schemaPathReplacements
 * (phase 2 sources assume phase 1 already ran), and architecture rules
 * run last. Callers apply the returned array in sequence.
 *
 * @param {string} posixRelativePath - e.g. ".claude/agents/backend-developer.md"
 * @param {object} scan - scan() result ({ backend, srcStructure, ... })
 * @param {object} config - project config (currently unread here)
 * @returns {Array<[string|RegExp, string]>|null}
 */
function computeRulesFor(posixRelativePath, scan, config) {
  // NOTE(review): `config` is never read in this function; presumably kept
  // for signature symmetry with applyStackAdaptationsToContent — confirm
  // before removing.
  const backend = scan.backend || {};
  const orm = backend.orm || 'your ORM';
  const db = backend.db || 'your database';
  const validation = backend.validation;
  const structure = scan.srcStructure || {};
  const arch = structure.pattern || 'ddd';

  // Phase 1: Zod → generic validation (applies only when validation !== 'Zod').
  // 'Zod data schemas' must come before 'Zod schemas' — the longer literal
  // would otherwise be partially consumed by the shorter one.
  const zodReplacements = [
    ['Zod data schemas', 'validation schemas'],
    ['Zod schemas', 'validation schemas'],
  ];

  // Phase 2: shared/src/schemas/ path cleanup. Applied AFTER phase 1, so
  // these match the post-replacement text. Longest/most-specific variants
  // are listed first for the same prefix-consumption reason.
  const schemaPathReplacements = [
    ['validation schemas in `shared/src/schemas/` if applicable', 'validation schemas if applicable'],
    ['validation schemas in `shared/src/schemas/` (if shared workspace exists)', 'validation schemas (if shared workspace exists)'],
    ['validation schemas in `shared/src/schemas/`', 'validation schemas'],
    ['validation schemas (`shared/src/schemas/`)', 'validation schemas'],
    ['`shared/src/schemas/` (if exists) for current validation schemas', 'project validation schemas'],
    // Gemini spec-creator: no "Zod" prefix, standalone path reference
    ['and `shared/src/schemas/` (if exists)', ''],
    ['schemas vs `shared/src/schemas/`', 'validation schemas up to date'],
  ];

  // ORM/DB replacements for backend agents. Only apply when the detected
  // ORM differs from Prisma (the template default) OR no ORM was
  // detected at all (replace with generic text).
  let ormReplacements = [];
  if (backend.orm && backend.orm !== 'Prisma') {
    ormReplacements = [
      ['Prisma ORM, and PostgreSQL', `${orm}${db !== 'your database' ? `, and ${db}` : ''}`],
      ['Repository implementations (Prisma)', `Repository implementations (${orm})`],
    ];
  } else if (!backend.orm) {
    // No ORM detected: dbLabel is either ', and <db>' or ''. slice(6)
    // strips the leading ', and ' so only the database name remains.
    const dbLabel = db !== 'your database' ? `, and ${db}` : '';
    ormReplacements = [
      ['Prisma ORM, and PostgreSQL', dbLabel ? dbLabel.slice(6) : 'your database'],
      ['Repository implementations (Prisma)', 'Repository implementations'],
    ];
  }

  // Architecture (DDD → layered) replacements, applied to backend agents
  // when the detected structure is NOT DDD. Regex rules target unique
  // numbered-list lines, hence no 'g' flag.
  const archReplacementsBackendPlanner = (arch !== 'ddd') ? [
    ['specializing in Domain-Driven Design (DDD) layered architecture with deep knowledge of',
      'specializing in layered architecture with deep knowledge of'],
    ['(DDD architecture)', '(layered architecture)'],
    [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
    [/\d+\. Explore existing domain entities, services, validators, repositories\n/,
      '5. Explore the codebase for existing patterns, layer structure, and reusable code\n'],
    [/\d+\. Explore `backend\/src\/infrastructure\/` for existing repositories\n/, ''],
    ['following DDD layer order: Domain > Application > Infrastructure > Presentation > Tests',
      'following the layer order defined in backend-standards.mdc'],
    ['Implementation Order (Domain > Application > Infrastructure > Presentation > Tests)',
      'Implementation Order (see backend-standards.mdc for layer order)'],
    ['Follow DDD layer separation: Domain > Application > Infrastructure > Presentation',
      'Follow the layer separation defined in backend-standards.mdc'],
  ] : [];

  const archReplacementsBackendDeveloper = (arch !== 'ddd') ? [
    ['follows DDD layered architecture', 'follows layered architecture'],
    ['specializing in Domain-Driven Design (DDD) with', 'specializing in layered architecture with'],
    ['(DDD architecture)', '(layered architecture)'],
    [/\d+\. Read `shared\/src\/schemas\/` \(if exists\) for current .* (?:data )?schemas\n/, ''],
    ['Follow the DDD layer order from the plan:',
      'Follow the layer order from the plan (see backend-standards.mdc for project layers):'],
    [/\d+\. \*\*Domain Layer\*\*: Entities, value objects, repository interfaces, domain errors\n/,
      '1. **Data Layer**: Models, database operations, data access\n'],
    [/\d+\. \*\*Application Layer\*\*: Services, validators, DTOs\n/,
      '2. **Business Logic Layer**: Controllers, services, external integrations\n'],
    [/\d+\. \*\*Infrastructure Layer\*\*: Repository implementations \([^)]*\), external integrations\n/,
      '3. **Presentation Layer**: Routes, handlers, middleware\n'],
    [/\d+\. \*\*Presentation Layer\*\*: Controllers, routes, middleware\n/,
      '4. **Integration Layer**: Wiring, configuration, server registration\n'],
    ['Follow DDD layer order: Domain > Application > Infrastructure > Presentation.',
      'Follow the layer order defined in backend-standards.mdc.'],
    ['**ALWAYS** follow DDD layer separation',
      '**ALWAYS** follow the layer separation defined in backend-standards.mdc'],
    ['**ALWAYS** handle errors with custom domain error classes',
      '**ALWAYS** handle errors following the patterns in backend-standards.mdc'],
    ['ALWAYS handle errors with domain error classes',
      'ALWAYS handle errors following the patterns in backend-standards.mdc'],
    [/- (?:\*\*MANDATORY\*\*: )?If modifying a DB schema → update .* schemas in `shared\/src\/schemas\/` BEFORE continuing\n/, ''],
  ] : [];

  // Dispatch table keyed by the file's POSIX path suffix.
  const isBackendAgent =
    posixRelativePath.endsWith('/agents/backend-developer.md') ||
    posixRelativePath.endsWith('/agents/backend-planner.md');
  const isMultiPurposeAgent =
    posixRelativePath.endsWith('/agents/spec-creator.md') ||
    posixRelativePath.endsWith('/agents/production-code-validator.md') ||
    posixRelativePath.endsWith('/agents/database-architect.md');
  const isWorkflowSkill =
    posixRelativePath.endsWith('/skills/development-workflow/SKILL.md') ||
    posixRelativePath.endsWith('/skills/development-workflow/references/ticket-template.md');

  // Accumulate rules for this file in the correct order.
  const rules = [];

  if (isBackendAgent) {
    if (validation !== 'Zod') {
      rules.push(...zodReplacements);
      rules.push(...ormReplacements);
      rules.push(...schemaPathReplacements);
    } else if (ormReplacements.length > 0) {
      // Zod project with a non-default/missing ORM: only ORM rules apply.
      rules.push(...ormReplacements);
    }
    // Architecture adaptations run after ORM/Zod.
    if (posixRelativePath.endsWith('/agents/backend-planner.md')) {
      rules.push(...archReplacementsBackendPlanner);
    } else if (posixRelativePath.endsWith('/agents/backend-developer.md')) {
      rules.push(...archReplacementsBackendDeveloper);
    }
  } else if (isMultiPurposeAgent) {
    if (validation !== 'Zod') {
      rules.push(...zodReplacements);
      rules.push(...schemaPathReplacements);
    }
  } else if (isWorkflowSkill) {
    // NOTE(review): identical body to the isMultiPurposeAgent branch above —
    // presumably kept separate so the two categories can diverge later.
    if (validation !== 'Zod') {
      rules.push(...zodReplacements);
      rules.push(...schemaPathReplacements);
    }
  }

  return rules.length > 0 ? rules : null;
}
|
|
192
|
+
|
|
193
|
+
/**
 * Apply an ordered list of [from, to] rules to a content string.
 *
 * String sources are replaced with `.replaceAll` (every occurrence).
 * RegExp sources are replaced with `.replace`, which respects the regex's
 * own flags (`g` for global, absent for first-occurrence only; the
 * current rule set omits `g` because each regex targets one unique
 * structural line).
 *
 * @param {string} content
 * @param {Array<[string|RegExp, string]>} rules
 * @returns {string} content after all rules were applied in order
 */
function applyRulesToContent(content, rules) {
  return rules.reduce(
    (text, [from, to]) =>
      from instanceof RegExp ? text.replace(from, to) : text.replaceAll(from, to),
    content
  );
}
|
|
211
|
+
|
|
212
|
+
/**
 * Pure, in-memory stack adaptation. Returns the adapted content.
 * Zero filesystem I/O. Safe to call repeatedly on the same input
 * (idempotent by rule design).
 *
 * @param {string} content - Raw file content
 * @param {string} posixRelativePath - e.g. ".claude/agents/backend-developer.md"
 * @param {object} scan
 * @param {object} config
 * @returns {string} adapted content (the input unchanged when no rules apply)
 */
function applyStackAdaptationsToContent(content, posixRelativePath, scan, config) {
  const rules = computeRulesFor(posixRelativePath, scan, config);
  return rules === null ? content : applyRulesToContent(content, rules);
}
|
|
228
|
+
|
|
229
|
+
/**
 * Candidate file list for stack adaptations. Mirrors the files touched
 * by the original adaptCopiedFiles. Only files that exist on disk are
 * returned — the presence check also implicitly excludes files a
 * single-stack project pruned at install time.
 *
 * @param {string} dest - Project root
 * @param {string} aiTools - 'claude' | 'gemini' | 'both'
 * @param {string} projectType - currently unused (kept for interface
 *   symmetry; absent files are filtered by presence instead)
 * @returns {string[]} POSIX relative paths that exist on disk
 */
function candidateFilesFor(dest, aiTools, projectType) {
  const toolDirs = [];
  if (aiTools !== 'gemini') toolDirs.push('.claude');
  if (aiTools !== 'claude') toolDirs.push('.gemini');

  const perToolFiles = [
    // Backend agents
    'agents/backend-developer.md',
    'agents/backend-planner.md',
    // Multi-purpose agents
    'agents/spec-creator.md',
    'agents/production-code-validator.md',
    'agents/database-architect.md',
    // Workflow skill files
    'skills/development-workflow/SKILL.md',
    'skills/development-workflow/references/ticket-template.md',
  ];

  const candidates = toolDirs.flatMap((dir) =>
    perToolFiles.map((file) => `${dir}/${file}`)
  );

  // Keep only files actually present on disk.
  return candidates.filter((posixPath) =>
    fs.existsSync(path.join(dest, ...posixPath.split('/')))
  );
}
|
|
261
|
+
|
|
262
|
+
/**
 * Apply stack adaptations to files on disk.
 *
 * @param {string} dest - Project root
 * @param {object} scan - scan() result
 * @param {object} config - { projectType, aiTools, ... }
 * @param {Set<string>|null} allowlist - POSIX paths permitted to be
 *   touched. If null, all candidate files are touched (install path).
 *   If a Set, only files whose POSIX path is IN the Set are touched
 *   (upgrade path — prevents running adaptations on preserved user
 *   files).
 * @returns {string[]} POSIX relative paths that were touched (whether
 *   their content actually changed or not — callers should re-hash them)
 */
function applyStackAdaptations(dest, scan, config, allowlist = null) {
  const touched = [];
  const candidates = candidateFilesFor(dest, config.aiTools, config.projectType);

  for (const posixPath of candidates) {
    if (allowlist !== null && !allowlist.has(posixPath)) continue;
    const absPath = path.join(dest, ...posixPath.split('/'));
    let content;
    try {
      content = fs.readFileSync(absPath, 'utf8');
    } catch {
      // Candidate became unreadable (deleted in a race, permissions):
      // skip it rather than failing the whole run.
      continue;
    }
    const adapted = applyStackAdaptationsToContent(content, posixPath, scan, config);
    // Only write when something changed, but still report the file as
    // touched so callers re-hash it either way.
    if (adapted !== content) {
      try {
        fs.writeFileSync(absPath, adapted, 'utf8');
      } catch (e) {
        console.warn(` ⚠ Failed to write stack-adapted ${posixPath}: ${e.code || e.message}`);
        continue;
      }
    }
    touched.push(posixPath);
  }

  // Non-agent adaptations: documentation-standards.mdc is project-type-
  // driven, not stack-driven. Keeps its own imperative branch here.
  const docStdRelative = 'ai-specs/specs/documentation-standards.mdc';
  const docStdPath = path.join(dest, docStdRelative);
  if (
    fs.existsSync(docStdPath) &&
    (allowlist === null || allowlist.has(docStdRelative))
  ) {
    try {
      const original = fs.readFileSync(docStdPath, 'utf8');
      let content = original;
      if (config.projectType === 'backend') {
        // Backend-only project: drop frontend standards/spec references.
        content = content.replace(/\| `ai-specs\/specs\/frontend-standards\.mdc` \|[^\n]*\n/, '');
        content = content.replace(/\| `docs\/specs\/ui-components\.md` \|[^\n]*\n/, '');
        content = content.replace(/ - UI component changes → `docs\/specs\/ui-components\.md`\n/, '');
      } else if (config.projectType === 'frontend') {
        // Frontend-only project: drop backend standards/spec references.
        content = content.replace(/\| `ai-specs\/specs\/backend-standards\.mdc` \|[^\n]*\n/, '');
        content = content.replace(/\| `docs\/specs\/api-spec\.yaml` \|[^\n]*\n/, '');
      }
      // Consistency fix: the agent loop above only writes when content
      // changed; previously this branch rewrote the identical bytes on
      // every run (pointless mtime churn on fullstack projects, where no
      // rule applies). The file is still reported as touched either way,
      // matching the documented contract.
      if (content !== original) {
        fs.writeFileSync(docStdPath, content, 'utf8');
      }
      touched.push(docStdRelative);
    } catch (e) {
      console.warn(` ⚠ Failed to adapt documentation-standards.mdc: ${e.code || e.message}`);
    }
  }

  return touched;
}
|
|
328
|
+
|
|
329
|
+
// Public API: file-based and in-memory adaptation entry points plus the
// rule-table internals (exported for testing and for upgrade-generator.js).
module.exports = {
  applyStackAdaptations,
  applyStackAdaptationsToContent,
  computeRulesFor,
  applyRulesToContent,
  candidateFilesFor,
};
|