launchframe 0.4.9 → 0.4.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.amazonq/cli-agents/launchframe.json +1 -1
- package/.amazonq/rules/project.md +119 -80
- package/.augment/commands/launchframe.md +16 -0
- package/.claude/skills/launchframe/SKILL.md +16 -0
- package/.clinerules +119 -80
- package/.codex/skills/launchframe/SKILL.md +16 -0
- package/.continue/commands/launchframe.md +16 -0
- package/.continue/rules/project.md +119 -80
- package/.cursor/commands/launchframe.md +16 -0
- package/.gemini/commands/launchframe.toml +16 -0
- package/.github/copilot-instructions.md +119 -80
- package/.github/skills/launchframe/SKILL.md +16 -0
- package/.gitignore +4 -0
- package/.opencode/commands/launchframe.md +16 -0
- package/.windsurf/workflows/launchframe.md +16 -0
- package/AGENTS.md +2 -2
- package/README.md +38 -165
- package/bin/launchframe.mjs +380 -380
- package/docs/research/INSPECTION_GUIDE.md +117 -78
- package/docs/research/example.com/page-inspection/example.com-2026-05-15T21-24-43-488Z/README.txt +16 -0
- package/docs/research/example.com/page-inspection/example.com-2026-05-15T21-24-43-488Z/body-outer.html +2 -0
- package/docs/research/example.com/page-inspection/example.com-2026-05-15T21-24-43-488Z/capture-meta.json +19 -0
- package/docs/research/example.com/page-inspection/example.com-2026-05-15T21-24-43-488Z/document.html +2 -0
- package/docs/research/example.com/page-inspection/example.com-2026-05-15T21-24-43-488Z/inline-styles.json +7 -0
- package/docs/research/example.com/page-inspection/example.com-2026-05-15T21-24-43-488Z/motion-summary.json +18 -0
- package/docs/research/page-captures/example.com-2026-05-15T21-21-31-863Z/README.txt +16 -0
- package/docs/research/page-captures/example.com-2026-05-15T21-21-31-863Z/body-outer.html +2 -0
- package/docs/research/page-captures/example.com-2026-05-15T21-21-31-863Z/capture-meta.json +19 -0
- package/docs/research/page-captures/example.com-2026-05-15T21-21-31-863Z/document.html +2 -0
- package/docs/research/page-captures/example.com-2026-05-15T21-21-31-863Z/inline-styles.json +7 -0
- package/docs/research/page-captures/example.com-2026-05-15T21-21-31-863Z/motion-summary.json +18 -0
- package/package.json +5 -3
- package/scripts/page-inspection-dump.mjs +386 -0
- package/scripts/sync-agent-rules.sh +88 -88
- package/tsconfig.json +34 -34
|
@@ -0,0 +1,386 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Capture a live page into files you can attach for an AI agent:
|
|
4
|
+
* — Full serialized HTML after JavaScript runs (like "Copy ▸ outerHTML" on <html>)
|
|
5
|
+
* — Every CSS response the browser loads (plus all inline <style> blocks)
|
|
6
|
+
* — Motion digest: @keyframes blocks, animation* / transition* declarations
|
|
7
|
+
*
|
|
8
|
+
* Prerequisite (once per machine / after npm install):
|
|
9
|
+
* npx playwright install chromium
|
|
10
|
+
*
|
|
11
|
+
* Usage:
|
|
12
|
+
* node scripts/page-inspection-dump.mjs https://example.com
|
|
13
|
+
* node scripts/page-inspection-dump.mjs https://example.com --out ./my-capture
|
|
14
|
+
* node scripts/page-inspection-dump.mjs https://example.com --out-parent docs/research/example.com/page-inspection --scroll-full
|
|
15
|
+
* node scripts/page-inspection-dump.mjs https://example.com --viewport 390,844 --scroll-full
|
|
16
|
+
*
|
|
17
|
+
* Options:
|
|
18
|
+
* --out <dir> Exact output directory (default: docs/research/page-captures/<host>-<stamp>)
|
|
19
|
+
* --out-parent <dir> Parent dir; writes <host>-<iso-stamp>/ inside it (--out wins if both set)
|
|
20
|
+
* --viewport <W>,<H> Browser viewport (default: 1440,900)
|
|
21
|
+
* --wait-until <mode> load | domcontentloaded | networkidle (default: networkidle)
|
|
22
|
+
* --timeout <ms> Navigation timeout (default: 90000)
|
|
23
|
+
* --scroll-full Scroll to the bottom slowly to trigger lazy-loaded regions
|
|
24
|
+
* --no-css-files Skip writing individual .css files (still builds motion from inline + collected text)
|
|
25
|
+
*/
|
|
26
|
+
|
|
27
|
+
import { chromium } from "playwright";
|
|
28
|
+
import { mkdir, writeFile } from "node:fs/promises";
|
|
29
|
+
import { createHash } from "node:crypto";
|
|
30
|
+
import { dirname, join, resolve } from "node:path";
|
|
31
|
+
import { fileURLToPath } from "node:url";
|
|
32
|
+
|
|
33
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
34
|
+
|
|
35
|
+
/**
 * Split a raw argv array into positional arguments, boolean flags, and
 * key/value options.
 *
 * Boolean flags (`--scroll-full`, `--no-css-files`) take no value; any other
 * `--key` consumes the following token as its value. A bare `--` token is
 * skipped. Exits the process with code 1 when an option is missing its value.
 *
 * @param {string[]} argv - Full process argv (the first two entries are skipped).
 * @returns {{ positional: string[], flags: Set<string>, opts: Record<string, string> }}
 */
function parseArgs(argv) {
  const booleanFlags = ["scroll-full", "no-css-files"];
  const positional = [];
  const flags = new Set();
  /** @type {Record<string, string>} */
  const opts = {};
  let i = 2;
  while (i < argv.length) {
    const token = argv[i];
    i += 1;
    if (token === "--") continue;
    if (!token.startsWith("--")) {
      positional.push(token);
      continue;
    }
    const key = token.slice(2);
    if (booleanFlags.includes(key)) {
      flags.add(key);
      continue;
    }
    const value = argv[i];
    if (!value || value.startsWith("--")) {
      console.error(`Missing value for --${key}`);
      process.exit(1);
    }
    opts[key] = value;
    i += 1;
  }
  return { positional, flags, opts };
}
|
|
65
|
+
|
|
66
|
+
/**
 * Make a string safe to embed in a filename: every run of characters outside
 * [a-zA-Z0-9._-] collapses into a single underscore, and the result is capped
 * at 120 characters.
 *
 * @param {string} s
 * @returns {string}
 */
function sanitizeFilenamePart(s) {
  const collapsed = s.replace(/[^a-zA-Z0-9._-]+/g, "_");
  return collapsed.substring(0, 120);
}
|
|
69
|
+
|
|
70
|
+
/**
 * Extract every @keyframes rule from a CSS string, including vendor-prefixed
 * forms such as @-webkit-keyframes / @-moz-keyframes, using brace counting so
 * nested blocks inside the rule stay intact.
 *
 * @param {string} css - Raw CSS text to scan.
 * @returns {{ name: string, block: string }[]} One entry per rule in document
 *   order; `block` is the full rule text including its braces.
 */
function extractKeyframeBlocks(css) {
  const out = [];
  // Match the rule header up to and including its opening brace. The optional
  // (?:-[a-z]+-)? group also catches -webkit-/-moz-/-o- prefixed rules, which
  // the previous pattern (@keyframes only) silently skipped.
  const re = /@(?:-[a-z]+-)?keyframes\s+([^/{[\s]+)\s*\{/gi;
  let m;
  while ((m = re.exec(css)) !== null) {
    const name = m[1].trim();
    // Index of the opening brace: m[0] always ends with "{".
    const openIdx = m.index + m[0].length - 1;
    let depth = 0;
    for (let i = openIdx; i < css.length; i++) {
      const c = css[i];
      if (c === "{") depth++;
      else if (c === "}") {
        depth--;
        if (depth === 0) {
          out.push({
            name,
            block: css.slice(m.index, i + 1).trim(),
          });
          break;
        }
      }
    }
  }
  return out;
}
|
|
100
|
+
|
|
101
|
+
/**
 * Collect every line of CSS that declares an animation-, transition-, or
 * transform-related property (or mentions @keyframes). Lines are trimmed;
 * empty lines and lines opening a comment are skipped. Matching is
 * case-insensitive.
 *
 * @param {string} css - Raw CSS text.
 * @returns {string[]} Trimmed motion-related lines, in source order.
 */
function extractMotionLines(css) {
  // Substrings that mark a line as motion-related (compared lowercased).
  const motionMarkers = [
    "animation:",
    "animation-name:",
    "animation-duration:",
    "animation-timing-function:",
    "animation-delay:",
    "animation-iteration-count:",
    "animation-direction:",
    "animation-fill-mode:",
    "animation-play-state:",
    "transition:",
    "transition-property:",
    "transition-duration:",
    "transition-timing-function:",
    "transition-delay:",
    "will-change:",
    "transform:",
    "@keyframes",
  ];
  const hits = [];
  for (const rawLine of css.split(/\r?\n/)) {
    const trimmed = rawLine.trim();
    if (trimmed === "" || trimmed.startsWith("/*")) continue;
    const lower = trimmed.toLowerCase();
    if (motionMarkers.some((marker) => lower.includes(marker))) {
      hits.push(trimmed);
    }
  }
  return hits;
}
|
|
136
|
+
|
|
137
|
+
/**
 * Scroll the page from top to bottom in viewport-sized steps (triggering
 * lazy-loaded regions along the way), then return to the top.
 *
 * @param {import('playwright').Page} page
 */
async function scrollFullPage(page) {
  await page.evaluate(async () => {
    const pause = (ms) => new Promise((resolve) => setTimeout(resolve, ms));
    // Advance ~85% of the viewport per tick, but never less than 240px.
    const stride = Math.max(240, Math.floor(window.innerHeight * 0.85));
    const totalHeight = Math.max(
      document.documentElement.scrollHeight,
      document.body?.scrollHeight ?? 0,
    );
    let offset = 0;
    while (offset < totalHeight) {
      window.scrollTo(0, offset);
      await pause(120);
      offset += stride;
    }
    window.scrollTo(0, 0);
    await pause(200);
  });
}
|
|
156
|
+
|
|
157
|
+
/**
 * Main entry point: parse CLI arguments, launch headless Chromium via
 * Playwright, capture the page's serialized HTML, inline <style> contents,
 * network-delivered stylesheets, and a motion (animation/transition) digest,
 * then write everything into an output directory whose path is printed to
 * stdout on success.
 *
 * Exits the process with code 1 on a missing/invalid URL or an invalid
 * --wait-until value.
 */
async function runCapture() {
  const { positional, flags, opts } = parseArgs(process.argv);
  const urlRaw = positional[0];
  if (!urlRaw) {
    console.error(`Usage: node scripts/page-inspection-dump.mjs <url> [options]`);
    process.exit(1);
  }

  // Normalize the URL (and reject anything the URL constructor cannot parse).
  let url;
  try {
    url = new URL(urlRaw).toString();
  } catch {
    console.error(`Invalid URL: ${urlRaw}`);
    process.exit(1);
  }

  // Viewport defaults to 1440x900; a single "--viewport N" value is used for
  // both width and height. Non-numeric parts fall back to the defaults.
  const viewportParts = (opts.viewport ?? "1440,900").split(",").map((s) => s.trim());
  const vw = Number(viewportParts[0]) || 1440;
  const vh = Number(viewportParts[1] ?? viewportParts[0]) || 900;

  // "commit" is accepted here even though the usage header only lists three
  // modes; all four are passed through to page.goto().
  const waitUntil = opts["wait-until"] ?? "networkidle";
  if (!["load", "domcontentloaded", "networkidle", "commit"].includes(waitUntil)) {
    console.error(`Invalid --wait-until: ${waitUntil}`);
    process.exit(1);
  }

  // NOTE(review): a non-numeric --timeout yields NaN here and is passed to
  // page.goto() unchecked — confirm desired behavior.
  const navigationTimeout = Number(opts.timeout ?? "90000");

  // Output directory resolution: --out wins, then --out-parent/<host>-<stamp>,
  // else the repo-relative default under docs/research/page-captures/.
  const host = new URL(url).hostname || "page";
  const stamp = new Date().toISOString().replace(/[:.]/g, "-");
  const defaultOut = join(
    __dirname,
    "..",
    "docs",
    "research",
    "page-captures",
    `${sanitizeFilenamePart(host)}-${stamp}`,
  );
  let outDir;
  if (opts.out) {
    outDir = resolve(process.cwd(), opts.out);
  } else if (opts["out-parent"]) {
    outDir = join(
      resolve(process.cwd(), opts["out-parent"]),
      `${sanitizeFilenamePart(host)}-${stamp}`,
    );
  } else {
    outDir = defaultOut;
  }

  await mkdir(outDir, { recursive: true });

  // Every CSS response observed on the network is accumulated here by the
  // response listener installed below.
  /** @type {{ url: string; bytes: number; text: string }[]} */
  const cssBodies = [];

  const browser = await chromium.launch({ headless: true });
  try {
    const context = await browser.newContext({
      viewport: { width: vw, height: vh },
      userAgent:
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 LaunchFramePageDump/1",
    });
    const page = await context.newPage();

    // Record the body of every stylesheet response (matched by resource type
    // or a text/css content-type). Errors reading a body are swallowed on
    // purpose — a missed stylesheet should not abort the whole capture.
    // NOTE(review): the handler is async, so bodies still in flight when the
    // capture below runs may be missed — confirm acceptable.
    page.on("response", async (response) => {
      try {
        const ct = (response.headers()["content-type"] ?? "").toLowerCase();
        const rt = response.request().resourceType();
        if (rt !== "stylesheet" && !ct.includes("text/css")) return;
        const buf = await response.body();
        const text = buf.toString("utf8");
        cssBodies.push({ url: response.url(), bytes: buf.byteLength, text });
      } catch {
        // ignore
      }
    });

    await page.goto(url, {
      waitUntil: /** @type {'load'} */ (waitUntil),
      timeout: navigationTimeout,
    });

    // Optionally scroll the full page so lazy-loaded regions issue their
    // requests before we serialize the DOM.
    if (flags.has("scroll-full")) {
      await scrollFullPage(page);
    }

    // One round-trip into the page: serialized <html>, serialized <body>,
    // every inline <style> tag's text, and an inventory of <script> elements.
    const [fullHtml, bodyOuter, inlineStyles, scriptInventory] = await page.evaluate(() => {
      const styles = Array.from(document.querySelectorAll("style")).map((el, i) => ({
        index: i,
        media: el.getAttribute("media") ?? "",
        text: el.textContent ?? "",
      }));
      const scripts = Array.from(document.querySelectorAll("script")).map((el, i) => ({
        index: i,
        src: el.getAttribute("src") ?? "",
        type: el.getAttribute("type") ?? "",
        async: el.hasAttribute("async"),
        defer: el.hasAttribute("defer"),
        inlineChars: (el.textContent ?? "").length,
      }));
      return [
        document.documentElement.outerHTML,
        document.body ? document.body.outerHTML : "",
        styles,
        scripts,
      ];
    });

    await writeFile(join(outDir, "document.html"), fullHtml, "utf8");
    if (bodyOuter) {
      await writeFile(join(outDir, "body-outer.html"), bodyOuter, "utf8");
    }

    await writeFile(
      join(outDir, "inline-styles.json"),
      JSON.stringify(inlineStyles, null, 2),
      "utf8",
    );

    // Write each captured stylesheet as NNN-<basename>-<sha256-prefix>.css;
    // the hash keeps same-named files from different URLs distinct.
    if (!flags.has("no-css-files")) {
      for (let i = 0; i < cssBodies.length; i++) {
        const { url: cssUrl, text } = cssBodies[i];
        const hash = createHash("sha256").update(text).digest("hex").slice(0, 12);
        let base = "stylesheet";
        try {
          const u = new URL(cssUrl);
          base = sanitizeFilenamePart(
            (u.pathname.split("/").filter(Boolean).pop() || "stylesheet").replace(/\.css$/i, ""),
          );
        } catch {
          base = "stylesheet";
        }
        const filename = `${String(i + 1).padStart(3, "0")}-${base}-${hash}.css`;
        await writeFile(join(outDir, filename), text, "utf8");
      }
    }

    // Aggregate inline + network CSS into one string so the motion helpers
    // see everything, regardless of --no-css-files.
    const inlineCssText = inlineStyles
      .map((s) => `/* inline style #${s.index} media=${s.media || "all"} */\n${s.text}`)
      .join("\n\n");

    const aggregatedCss = [
      inlineCssText,
      ...cssBodies.map((c) => `/* network stylesheet: ${c.url} */\n${c.text}`),
    ].join("\n\n");

    const keyframes = extractKeyframeBlocks(aggregatedCss);
    const motionLines = extractMotionLines(aggregatedCss);

    // motion-summary.json: counts plus the actual keyframe blocks (truncated
    // past 12000 chars) and a capped sample of motion-related lines.
    /** @type {Record<string, unknown>} */
    const motionSummary = {
      sourceUrl: url,
      generatedAt: new Date().toISOString(),
      viewport: { width: vw, height: vh },
      counts: {
        keyframeRules: keyframes.length,
        motionRelatedLines: motionLines.length,
        inlineStyleTags: inlineStyles.length,
        networkStylesheets: cssBodies.length,
        scriptElements: scriptInventory.length,
      },
      keyframes: keyframes.map((k) => ({
        name: k.name,
        blockChars: k.block.length,
        block:
          k.block.length > 12000
            ? `${k.block.slice(0, 12000)}\n/* …truncated… */\n`
            : k.block,
      })),
      motionLinesSample: motionLines.slice(0, 800),
      motionLinesTotal: motionLines.length,
    };

    await writeFile(
      join(outDir, "motion-summary.json"),
      JSON.stringify(motionSummary, null, 2),
      "utf8",
    );

    // capture-meta.json: everything needed to reproduce/interpret the capture
    // (options used, byte sizes, stylesheet URLs, script inventory).
    /** @type {Record<string, unknown>} */
    const meta = {
      sourceUrl: url,
      generatedAt: new Date().toISOString(),
      viewport: { width: vw, height: vh },
      waitUntil,
      options: {
        scrollFull: flags.has("scroll-full"),
        wroteCssFiles: !flags.has("no-css-files"),
      },
      outputs: {
        documentHtmlBytes: Buffer.byteLength(fullHtml, "utf8"),
        bodyOuterHtmlBytes: bodyOuter ? Buffer.byteLength(bodyOuter, "utf8") : 0,
        networkStylesheets: cssBodies.map((c) => ({ url: c.url, bytes: c.bytes })),
      },
      scripts: scriptInventory,
    };

    await writeFile(join(outDir, "capture-meta.json"), JSON.stringify(meta, null, 2), "utf8");

    // Human-readable index of the capture directory.
    await writeFile(
      join(outDir, "README.txt"),
      [
        "LaunchFrame page inspection capture",
        "====================================",
        `Source: ${url}`,
        "",
        "Files:",
        " document.html — Full <html> outerHTML after JS (attach this for an agent)",
        " body-outer.html — <body> outerHTML only (smaller)",
        " inline-styles.json — All inline <style> tag contents",
        " motion-summary.json— @keyframes + animation/transition-related lines",
        " capture-meta.json — Viewport, stylesheet URLs, <script> inventory",
        " *.css — Stylesheets observed on the network (unless --no-css-files)",
        "",
        "Notes:",
        " • Some sites lazy-load CSS; use --scroll-full to force more requests.",
        " • Cross-origin styles you cannot access via JS are still captured here if the browser downloaded them.",
        " • Motion inside bundled JS (not CSS) is not fully extracted—check script src in capture-meta.json.",
        "",
      ].join("\n"),
      "utf8",
    );

    // The only stdout output: the capture directory path.
    console.log(outDir);
  } finally {
    // Always shut the browser down, even when the capture throws.
    await browser.close();
  }
}

// Script entry: run the capture immediately (top-level await in an .mjs).
await runCapture();
|
|
@@ -1,88 +1,88 @@
|
|
|
1
|
-
#!/usr/bin/env bash
|
|
2
|
-
#
|
|
3
|
-
# sync-agent-rules.sh — Generate AI agent config files from AGENTS.md
|
|
4
|
-
#
|
|
5
|
-
# AGENTS.md is the single source of truth. This script creates copies
|
|
6
|
-
# for agents that don't read AGENTS.md natively (Cline, Continue,
|
|
7
|
-
# Amazon Q, GitHub Copilot Chat).
|
|
8
|
-
#
|
|
9
|
-
# Usage:
|
|
10
|
-
# bash scripts/sync-agent-rules.sh
|
|
11
|
-
#
|
|
12
|
-
# Agents that DON'T need generated files (they read AGENTS.md natively):
|
|
13
|
-
# Codex CLI, OpenCode, Cursor, Windsurf, Copilot Coding Agent,
|
|
14
|
-
# Roo Code, Aider, Augment Code
|
|
15
|
-
#
|
|
16
|
-
# Agents with their own thin pointer files (created manually):
|
|
17
|
-
# Claude Code → CLAUDE.md (@AGENTS.md import)
|
|
18
|
-
# Gemini CLI → GEMINI.md (@AGENTS.md import)
|
|
19
|
-
# Cursor → .cursor/rules/project.mdc (pointer)
|
|
20
|
-
# Windsurf → .windsurfrules (pointer)
|
|
21
|
-
# Aider → .aider.conf.yml (read: [AGENTS.md])
|
|
22
|
-
|
|
23
|
-
set -euo pipefail
|
|
24
|
-
|
|
25
|
-
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
|
|
26
|
-
SOURCE="$REPO_ROOT/AGENTS.md"
|
|
27
|
-
|
|
28
|
-
if [[ ! -f "$SOURCE" ]]; then
|
|
29
|
-
echo "Error: AGENTS.md not found at $SOURCE" >&2
|
|
30
|
-
exit 1
|
|
31
|
-
fi
|
|
32
|
-
|
|
33
|
-
# Resolve @file imports (Claude Code syntax) into inline content.
|
|
34
|
-
# Lines like "@docs/research/INSPECTION_GUIDE.md" become the file's contents.
|
|
35
|
-
resolve_imports() {
|
|
36
|
-
while IFS= read -r line || [[ -n "$line" ]]; do
|
|
37
|
-
line="${line%$'\r'}"
|
|
38
|
-
if [[ "$line" =~ ^@(.+)$ ]]; then
|
|
39
|
-
local import_path="${BASH_REMATCH[1]}"
|
|
40
|
-
local resolved="$REPO_ROOT/$import_path"
|
|
41
|
-
if [[ -f "$resolved" ]]; then
|
|
42
|
-
cat "$resolved"
|
|
43
|
-
echo ""
|
|
44
|
-
else
|
|
45
|
-
echo "<!-- Import not found: $import_path -->"
|
|
46
|
-
fi
|
|
47
|
-
else
|
|
48
|
-
echo "$line"
|
|
49
|
-
fi
|
|
50
|
-
done < "$SOURCE"
|
|
51
|
-
}
|
|
52
|
-
|
|
53
|
-
RESOLVED_CONTENT="$(resolve_imports)"
|
|
54
|
-
|
|
55
|
-
HEADER="<!-- AUTO-GENERATED from AGENTS.md — do not edit directly.
|
|
56
|
-
Run \`bash scripts/sync-agent-rules.sh\` to regenerate. -->"
|
|
57
|
-
|
|
58
|
-
# Helper: write a generated file with header
|
|
59
|
-
write_file() {
|
|
60
|
-
local target="$1"
|
|
61
|
-
local content="$2"
|
|
62
|
-
mkdir -p "$(dirname "$target")"
|
|
63
|
-
printf '%s\n\n%s\n' "$HEADER" "$content" > "$target"
|
|
64
|
-
echo " ✓ $target"
|
|
65
|
-
}
|
|
66
|
-
|
|
67
|
-
echo "Syncing agent rules from AGENTS.md..."
|
|
68
|
-
|
|
69
|
-
# GitHub Copilot Chat — .github/copilot-instructions.md
|
|
70
|
-
write_file "$REPO_ROOT/.github/copilot-instructions.md" "$RESOLVED_CONTENT"
|
|
71
|
-
|
|
72
|
-
# Cline / Roo Code — .clinerules
|
|
73
|
-
write_file "$REPO_ROOT/.clinerules" "$RESOLVED_CONTENT"
|
|
74
|
-
|
|
75
|
-
# Continue — .continue/rules/project.md
|
|
76
|
-
CONTINUE_FRONTMATTER="---
|
|
77
|
-
description: Project conventions for AI Website Clone Template
|
|
78
|
-
alwaysApply: true
|
|
79
|
-
---"
|
|
80
|
-
write_file "$REPO_ROOT/.continue/rules/project.md" "$CONTINUE_FRONTMATTER
|
|
81
|
-
$RESOLVED_CONTENT"
|
|
82
|
-
|
|
83
|
-
# Amazon Q Developer — .amazonq/rules/project.md
|
|
84
|
-
write_file "$REPO_ROOT/.amazonq/rules/project.md" "$RESOLVED_CONTENT"
|
|
85
|
-
|
|
86
|
-
echo ""
|
|
87
|
-
echo "Done. Generated files are committed to the repo but sourced from AGENTS.md."
|
|
88
|
-
echo "Edit AGENTS.md, then re-run this script to update all agent configs."
|
|
1
|
+
#!/usr/bin/env bash
#
# sync-agent-rules.sh — Generate AI agent config files from AGENTS.md
#
# AGENTS.md is the single source of truth. This script creates copies
# for agents that don't read AGENTS.md natively (Cline, Continue,
# Amazon Q, GitHub Copilot Chat).
#
# Usage:
#   bash scripts/sync-agent-rules.sh
#
# Agents that DON'T need generated files (they read AGENTS.md natively):
#   Codex CLI, OpenCode, Cursor, Windsurf, Copilot Coding Agent,
#   Roo Code, Aider, Augment Code
#
# Agents with their own thin pointer files (created manually):
#   Claude Code → CLAUDE.md (@AGENTS.md import)
#   Gemini CLI → GEMINI.md (@AGENTS.md import)
#   Cursor → .cursor/rules/project.mdc (pointer)
#   Windsurf → .windsurfrules (pointer)
#   Aider → .aider.conf.yml (read: [AGENTS.md])

set -euo pipefail

# Repo root is the parent of the directory containing this script.
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
SOURCE="$REPO_ROOT/AGENTS.md"

if [[ ! -f "$SOURCE" ]]; then
  echo "Error: AGENTS.md not found at $SOURCE" >&2
  exit 1
fi

# Resolve @file imports (Claude Code syntax) into inline content.
# Lines like "@docs/research/INSPECTION_GUIDE.md" become the file's contents.
resolve_imports() {
  # `|| [[ -n "$line" ]]` keeps a final line that lacks a trailing newline.
  while IFS= read -r line || [[ -n "$line" ]]; do
    # Strip a trailing carriage return so CRLF files don't leak \r into output.
    line="${line%$'\r'}"
    if [[ "$line" =~ ^@(.+)$ ]]; then
      local import_path="${BASH_REMATCH[1]}"
      local resolved="$REPO_ROOT/$import_path"
      if [[ -f "$resolved" ]]; then
        cat "$resolved"
        echo ""
      else
        # Leave a visible marker in the generated file instead of failing.
        echo "<!-- Import not found: $import_path -->"
      fi
    else
      echo "$line"
    fi
  done < "$SOURCE"
}

RESOLVED_CONTENT="$(resolve_imports)"

# Banner prepended to every generated file (two-line HTML comment).
HEADER="<!-- AUTO-GENERATED from AGENTS.md — do not edit directly.
Run \`bash scripts/sync-agent-rules.sh\` to regenerate. -->"

# Helper: write a generated file with header
write_file() {
  local target="$1"
  local content="$2"
  mkdir -p "$(dirname "$target")"
  printf '%s\n\n%s\n' "$HEADER" "$content" > "$target"
  echo " ✓ $target"
}

echo "Syncing agent rules from AGENTS.md..."

# GitHub Copilot Chat — .github/copilot-instructions.md
write_file "$REPO_ROOT/.github/copilot-instructions.md" "$RESOLVED_CONTENT"

# Cline / Roo Code — .clinerules
write_file "$REPO_ROOT/.clinerules" "$RESOLVED_CONTENT"

# Continue — .continue/rules/project.md (needs YAML frontmatter prepended)
CONTINUE_FRONTMATTER="---
description: Project conventions for AI Website Clone Template
alwaysApply: true
---"
write_file "$REPO_ROOT/.continue/rules/project.md" "$CONTINUE_FRONTMATTER
$RESOLVED_CONTENT"

# Amazon Q Developer — .amazonq/rules/project.md
write_file "$REPO_ROOT/.amazonq/rules/project.md" "$RESOLVED_CONTENT"

echo ""
echo "Done. Generated files are committed to the repo but sourced from AGENTS.md."
echo "Edit AGENTS.md, then re-run this script to update all agent configs."
|
package/tsconfig.json
CHANGED
|
@@ -1,34 +1,34 @@
|
|
|
1
|
-
{
|
|
2
|
-
"compilerOptions": {
|
|
3
|
-
"target": "ES2017",
|
|
4
|
-
"lib": ["dom", "dom.iterable", "esnext"],
|
|
5
|
-
"allowJs": true,
|
|
6
|
-
"skipLibCheck": true,
|
|
7
|
-
"strict": true,
|
|
8
|
-
"noEmit": true,
|
|
9
|
-
"esModuleInterop": true,
|
|
10
|
-
"module": "esnext",
|
|
11
|
-
"moduleResolution": "bundler",
|
|
12
|
-
"resolveJsonModule": true,
|
|
13
|
-
"isolatedModules": true,
|
|
14
|
-
"jsx": "react-jsx",
|
|
15
|
-
"incremental": true,
|
|
16
|
-
"plugins": [
|
|
17
|
-
{
|
|
18
|
-
"name": "next"
|
|
19
|
-
}
|
|
20
|
-
],
|
|
21
|
-
"paths": {
|
|
22
|
-
"@/*": ["./src/*"]
|
|
23
|
-
}
|
|
24
|
-
},
|
|
25
|
-
"include": [
|
|
26
|
-
"next-env.d.ts",
|
|
27
|
-
"**/*.ts",
|
|
28
|
-
"**/*.tsx",
|
|
29
|
-
".next/types/**/*.ts",
|
|
30
|
-
".next/dev/types/**/*.ts",
|
|
31
|
-
"**/*.mts"
|
|
32
|
-
],
|
|
33
|
-
"exclude": ["node_modules"]
|
|
34
|
-
}
|
|
1
|
+
{
|
|
2
|
+
"compilerOptions": {
|
|
3
|
+
"target": "ES2017",
|
|
4
|
+
"lib": ["dom", "dom.iterable", "esnext"],
|
|
5
|
+
"allowJs": true,
|
|
6
|
+
"skipLibCheck": true,
|
|
7
|
+
"strict": true,
|
|
8
|
+
"noEmit": true,
|
|
9
|
+
"esModuleInterop": true,
|
|
10
|
+
"module": "esnext",
|
|
11
|
+
"moduleResolution": "bundler",
|
|
12
|
+
"resolveJsonModule": true,
|
|
13
|
+
"isolatedModules": true,
|
|
14
|
+
"jsx": "react-jsx",
|
|
15
|
+
"incremental": true,
|
|
16
|
+
"plugins": [
|
|
17
|
+
{
|
|
18
|
+
"name": "next"
|
|
19
|
+
}
|
|
20
|
+
],
|
|
21
|
+
"paths": {
|
|
22
|
+
"@/*": ["./src/*"]
|
|
23
|
+
}
|
|
24
|
+
},
|
|
25
|
+
"include": [
|
|
26
|
+
"next-env.d.ts",
|
|
27
|
+
"**/*.ts",
|
|
28
|
+
"**/*.tsx",
|
|
29
|
+
".next/types/**/*.ts",
|
|
30
|
+
".next/dev/types/**/*.ts",
|
|
31
|
+
"**/*.mts"
|
|
32
|
+
],
|
|
33
|
+
"exclude": ["node_modules"]
|
|
34
|
+
}
|