launchframe 0.4.13 → 0.4.14
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/.amazonq/cli-agents/launchframe.json +2 -2
- package/.amazonq/cli-agents/snapshot.json +1 -1
- package/.amazonq/rules/project.md +20 -11
- package/.augment/commands/launchframe.md +149 -35
- package/.augment/commands/snapshot.md +10 -0
- package/.claude/skills/launchframe/SKILL.md +149 -35
- package/.claude/skills/snapshot/SKILL.md +10 -0
- package/.clinerules +20 -11
- package/.codex/skills/launchframe/SKILL.md +149 -35
- package/.codex/skills/snapshot/SKILL.md +10 -0
- package/.continue/commands/launchframe.md +149 -35
- package/.continue/commands/snapshot.md +10 -0
- package/.continue/rules/project.md +20 -11
- package/.cursor/commands/launchframe.md +148 -34
- package/.cursor/commands/snapshot.md +10 -0
- package/.gemini/commands/launchframe.toml +149 -35
- package/.gemini/commands/snapshot.toml +10 -0
- package/.github/copilot-instructions.md +20 -11
- package/.github/skills/launchframe/SKILL.md +149 -35
- package/.github/skills/snapshot/SKILL.md +10 -0
- package/.opencode/commands/launchframe.md +149 -35
- package/.opencode/commands/snapshot.md +10 -0
- package/.windsurf/workflows/launchframe.md +148 -34
- package/.windsurf/workflows/snapshot.md +10 -0
- package/AGENTS.md +10 -7
- package/docs/research/INSPECTION_GUIDE.md +9 -3
- package/docs/research/LAUNCHFRAME_SUBAGENTS.md +31 -7
- package/package.json +3 -2
- package/scripts/mirror-snapshot-assets.mjs +257 -0
- package/scripts/sync-agent-rules.sh +88 -88
|
@@ -0,0 +1,257 @@
|
|
|
1
|
+
#!/usr/bin/env node
|
|
2
|
+
/**
|
|
3
|
+
* Mirror root-relative assets referenced by a captured document.html into `public/`.
|
|
4
|
+
*
|
|
5
|
+
* Why this exists:
|
|
6
|
+
* /launchframe and /snapshot use `npm run inspect:page` to capture a frozen copy
|
|
7
|
+
* of a live URL (document.html, body-outer.html, network *.css, motion-summary.json,
|
|
8
|
+
* capture-meta.json). When the captured HTML/CSS references **root-relative** paths
|
|
9
|
+
* like /_next/static/..., /assets/..., or /static/..., those paths resolve against
|
|
10
|
+
* localhost:3000 in dev — not the original origin — so the iframe (or a stand-in
|
|
11
|
+
* reference page) breaks unless we sideload those bytes locally.
|
|
12
|
+
*
|
|
13
|
+
* This script reads the **latest** capture under
|
|
14
|
+
* docs/research/<host>/page-inspection/<stamp>/ (preferred), or
|
|
15
|
+
* docs/research/page-captures/<host>-<stamp>/ (default fallback)
|
|
16
|
+
* for the host of LAUNCHFRAME_SOURCE_URL, scans the document.html + every CSS
|
|
17
|
+
* file for root-relative URLs, and downloads each one to `public/<same-path>`.
|
|
18
|
+
*
|
|
19
|
+
* What it is NOT:
|
|
20
|
+
* - It is NOT the implementation. Mirrored bundles are runtime fixtures so offline
|
|
21
|
+
* preview of `document.html` (or generated screenshots) match the live site.
|
|
22
|
+
* The hand-authored components under `src/components/**` are the deliverable.
|
|
23
|
+
* - It does not rewrite document.html or any source file.
|
|
24
|
+
* - It does not handle absolute https:// CDN URLs (the browser still fetches those
|
|
25
|
+
* normally; CDNs typically allow cross-origin loads). Use the existing inspection
|
|
26
|
+
* CSS files for offline CSS replay if you need it.
|
|
27
|
+
*
|
|
28
|
+
* Usage:
|
|
29
|
+
* npm run mirror:snapshot-assets
|
|
30
|
+
* node scripts/mirror-snapshot-assets.mjs --base https://www.example.com
|
|
31
|
+
* node scripts/mirror-snapshot-assets.mjs --capture docs/research/www.example.com/page-inspection/www.example.com-2026-05-15T21-49-07-887Z
|
|
32
|
+
* node scripts/mirror-snapshot-assets.mjs --paths /_next /assets /static /fonts
|
|
33
|
+
*
|
|
34
|
+
* Options:
|
|
35
|
+
* --base <url> Origin to fetch from. Defaults to LAUNCHFRAME_SOURCE_URL
|
|
36
|
+
* in src/lib/launchframe-config.ts.
|
|
37
|
+
* --capture <dir> Specific inspection capture folder. Defaults to the most
|
|
38
|
+
* recent capture under docs/research/<host>/page-inspection/
|
|
39
|
+
* (or docs/research/page-captures/<host>-*).
|
|
40
|
+
* --paths <list> Space- or comma-separated allowlist of root prefixes to
|
|
41
|
+
* mirror. Defaults to: /_next /assets /static /fonts /images.
|
|
42
|
+
* --concurrency <n> Parallel downloads (default 6).
|
|
43
|
+
* --dry-run Print the URLs that would be downloaded without writing.
|
|
44
|
+
*/
|
|
45
|
+
|
|
46
|
+
import { mkdir, readFile, readdir, stat, writeFile } from "node:fs/promises";
|
|
47
|
+
import { existsSync } from "node:fs";
|
|
48
|
+
import { dirname, join, resolve } from "node:path";
|
|
49
|
+
import { fileURLToPath } from "node:url";
|
|
50
|
+
|
|
51
|
+
// Absolute directory of this script (ES modules have no built-in __dirname).
const __dirname = dirname(fileURLToPath(import.meta.url));
// Repository root: this script lives in <repo>/scripts/, so go one level up.
const REPO_ROOT = resolve(__dirname, "..");
|
|
53
|
+
|
|
54
|
+
/**
 * Parse process.argv into boolean flags and key/value options.
 *
 * Recognizes `--dry-run` as a value-less flag; every other `--key` must be
 * followed by a value that does not itself start with `--`, otherwise the
 * process exits with code 1. Tokens that don't start with `--` are ignored.
 *
 * @param {string[]} argv - full argv (node binary and script path at 0/1)
 * @returns {{ flags: Set<string>, opts: Record<string, string> }}
 */
function parseArgs(argv) {
  const flags = new Set();
  /** @type {Record<string, string>} */
  const opts = {};
  let cursor = 2; // skip node binary and script path
  while (cursor < argv.length) {
    const token = argv[cursor];
    cursor += 1;
    if (!token.startsWith("--")) continue;
    const name = token.slice(2);
    if (name === "dry-run") {
      flags.add(name);
      continue;
    }
    const value = argv[cursor];
    if (!value || value.startsWith("--")) {
      console.error(`Missing value for --${name}`);
      process.exit(1);
    }
    opts[name] = value;
    cursor += 1; // consume the value token
  }
  return { flags, opts };
}
|
|
76
|
+
|
|
77
|
+
/**
 * Extract LAUNCHFRAME_SOURCE_URL from src/lib/launchframe-config.ts.
 *
 * @returns {Promise<string|null>} the configured URL string, or null when the
 *   config file is absent or the constant assignment cannot be found.
 */
async function readSourceUrlFromConfig() {
  const configPath = join(REPO_ROOT, "src/lib/launchframe-config.ts");
  if (!existsSync(configPath)) return null;
  const source = await readFile(configPath, "utf8");
  // Match e.g.: const LAUNCHFRAME_SOURCE_URL = "https://example.com"
  const match = /LAUNCHFRAME_SOURCE_URL\s*=\s*["'`]([^"'`]+)["'`]/.exec(source);
  return match?.[1] ?? null;
}
|
|
84
|
+
|
|
85
|
+
/**
 * Find the most recently modified capture directory for `host`.
 *
 * Searches both layouts:
 *   docs/research/<host>/page-inspection/<stamp>/   (preferred)
 *   docs/research/page-captures/<host>-<stamp>/     (fallback)
 *
 * @param {string} host - hostname of the source URL
 * @returns {Promise<string|null>} absolute path of the newest capture dir, or null
 */
async function findLatestCapture(host) {
  /** @type {{ full: string, mtimeMs: number }[]} */
  const candidates = [];

  // Collect subdirectories of `dir` whose names pass `accept`.
  const scan = async (dir, accept) => {
    if (!existsSync(dir)) return;
    for (const entry of await readdir(dir)) {
      if (!accept(entry)) continue;
      const full = join(dir, entry);
      const info = await stat(full);
      if (info.isDirectory()) candidates.push({ full, mtimeMs: info.mtimeMs });
    }
  };

  await scan(join(REPO_ROOT, "docs/research", host, "page-inspection"), () => true);
  await scan(join(REPO_ROOT, "docs/research/page-captures"), (name) =>
    name.startsWith(`${host}-`),
  );

  // Newest first by modification time.
  candidates.sort((x, y) => y.mtimeMs - x.mtimeMs);
  return candidates[0]?.full ?? null;
}
|
|
107
|
+
|
|
108
|
+
/**
 * Scan `text` (HTML/CSS) for root-relative URLs under the allowed prefixes.
 *
 * A candidate must directly follow a quote or `(` (attribute values and CSS
 * url(...)), start with a single `/` (protocol-relative `//host/...` URLs are
 * rejected), and match an allowed prefix exactly or as a `<prefix>/...` path.
 * Query strings and fragments are stripped; results are deduplicated.
 *
 * @param {string} text - aggregated HTML/CSS content to scan
 * @param {string[]} allowedPrefixes - root prefixes like "/_next", "/assets"
 * @returns {string[]} unique root-relative paths, in discovery order
 */
function collectRootRelativePaths(text, allowedPrefixes) {
  /** @type {Set<string>} */
  const unique = new Set();
  const pattern = /(?:["'(])(\/[A-Za-z0-9_./~?@\-+%=#]+)/g;
  for (const match of text.matchAll(pattern)) {
    // Drop query string / fragment: keep everything before the first ? or #.
    const path = match[1].split(/[?#]/)[0];
    if (path.startsWith("//")) continue; // protocol-relative, not a local path
    const isAllowed = allowedPrefixes.some(
      (prefix) => path === prefix || path.startsWith(`${prefix}/`),
    );
    if (isAllowed) unique.add(path);
  }
  return [...unique];
}
|
|
121
|
+
|
|
122
|
+
/**
 * Mirror one root-relative `path` from `baseUrl` into `public/<path>`.
 *
 * Existing local files are never overwritten; dry-run reports the URL without
 * fetching or writing. Failures are reported in the status string rather than
 * thrown, so one bad asset does not abort the batch.
 *
 * @param {string} baseUrl - origin to resolve `path` against
 * @param {string} path - root-relative asset path (leading slash)
 * @param {boolean} dryRun - when true, do not fetch or write anything
 * @returns {Promise<{ url: string, target: string, status: string }>}
 */
async function downloadOne(baseUrl, path, dryRun) {
  const url = new URL(path, baseUrl).toString();
  const target = join(REPO_ROOT, "public", path.replace(/^\//, ""));

  if (existsSync(target)) return { url, target, status: "skipped (exists)" };
  if (dryRun) return { url, target, status: "would download" };

  // Browser-like UA: some origins reject requests with no/odd user agents.
  const response = await fetch(url, {
    headers: {
      "user-agent":
        "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/120.0.0.0 Safari/537.36 LaunchFrameMirror/1",
    },
    redirect: "follow",
  });
  if (!response.ok) return { url, target, status: `fail ${response.status}` };

  const bytes = Buffer.from(await response.arrayBuffer());
  await mkdir(dirname(target), { recursive: true });
  await writeFile(target, bytes);
  return { url, target, status: `ok ${bytes.byteLength}b` };
}
|
|
146
|
+
|
|
147
|
+
/**
 * Run `fn` over every item with at most `n` workers in flight.
 *
 * Results land at the same index as their input item. A rejected `fn` is
 * recorded as `{ error, item }` at that index instead of aborting the run.
 *
 * @template T, R
 * @param {T[]} items - inputs to process
 * @param {number} n - desired parallelism; non-finite values (e.g. NaN from a
 *   bad --concurrency flag) are clamped to 1. Previously NaN produced
 *   Array.from({ length: NaN }) → zero workers, so the run silently did
 *   nothing, and Infinity threw a RangeError.
 * @param {(item: T) => Promise<R>} fn - async worker function
 * @returns {Promise<(R | { error: string, item: T })[]>}
 */
async function runWithConcurrency(items, n, fn) {
  const results = [];
  let i = 0;
  // Clamp to a sane, finite worker count (at least one worker).
  const workerCount = Number.isFinite(n) ? Math.max(1, Math.floor(n)) : 1;
  const workers = Array.from({ length: workerCount }, async () => {
    // `i++` runs synchronously before any await, so each index is claimed once.
    while (i < items.length) {
      const idx = i++;
      try {
        results[idx] = await fn(items[idx]);
      } catch (e) {
        results[idx] = { error: String(e), item: items[idx] };
      }
    }
  });
  await Promise.all(workers);
  return results;
}
|
|
163
|
+
|
|
164
|
+
/**
 * Entry point: resolve the base origin and newest capture directory, scan the
 * captured HTML/CSS for root-relative asset paths, and mirror each into
 * `public/` (honoring --dry-run).
 *
 * Exits with code 1 on unusable input: no base URL available, an unparsable
 * base URL, or no capture directory to scan.
 */
async function main() {
  const { flags, opts } = parseArgs(process.argv);
  const dryRun = flags.has("dry-run");

  // --base wins; otherwise fall back to LAUNCHFRAME_SOURCE_URL in the config.
  const baseUrlRaw = opts.base ?? (await readSourceUrlFromConfig());
  if (!baseUrlRaw) {
    console.error(
      "Could not determine base URL. Pass --base <url> or set LAUNCHFRAME_SOURCE_URL in src/lib/launchframe-config.ts.",
    );
    process.exit(1);
  }
  let baseUrl;
  try {
    baseUrl = new URL(baseUrlRaw);
  } catch {
    console.error(`Invalid --base URL: ${baseUrlRaw}`);
    process.exit(1);
  }

  // --capture wins; otherwise use the newest capture folder for this host.
  const captureDir = opts.capture
    ? resolve(process.cwd(), opts.capture)
    : await findLatestCapture(baseUrl.hostname);
  if (!captureDir || !existsSync(captureDir)) {
    console.error(
      `No inspection capture found. Run \`npm run inspect:page -- "${baseUrl.toString()}" --scroll-full --out-parent "docs/research/${baseUrl.hostname}/page-inspection"\` first, or pass --capture <dir>.`,
    );
    process.exit(1);
  }

  // Allowlist of root prefixes to mirror; normalize each to a leading slash.
  const allowed = (opts.paths ?? "/_next /assets /static /fonts /images")
    .split(/[\s,]+/)
    .filter(Boolean)
    .map((p) => (p.startsWith("/") ? p : `/${p}`));

  // Guard against a non-numeric --concurrency: Number("abc") is NaN, which
  // previously flowed into the worker pool unchecked.
  const requested = Number(opts.concurrency ?? "6");
  const concurrency = Number.isFinite(requested) ? Math.max(1, requested) : 6;

  // Gather every text source that may reference root-relative assets.
  const documentHtmlPath = join(captureDir, "document.html");
  const cssFiles = (await readdir(captureDir)).filter((n) => n.endsWith(".css"));

  const aggregated = [];
  if (existsSync(documentHtmlPath)) {
    aggregated.push(await readFile(documentHtmlPath, "utf8"));
  }
  // CSS files are independent of each other — read them in parallel.
  aggregated.push(
    ...(await Promise.all(cssFiles.map((f) => readFile(join(captureDir, f), "utf8")))),
  );
  const inlineStylesPath = join(captureDir, "inline-styles.json");
  if (existsSync(inlineStylesPath)) {
    aggregated.push(await readFile(inlineStylesPath, "utf8"));
  }

  const paths = collectRootRelativePaths(aggregated.join("\n"), allowed);
  if (paths.length === 0) {
    console.log(
      `No root-relative paths under ${allowed.join(", ")} were found in ${captureDir}. Nothing to mirror.`,
    );
    return;
  }

  console.log(
    `Mirroring ${paths.length} root-relative assets from ${baseUrl.origin} to public/ ...`,
  );
  if (dryRun) {
    console.log("(dry-run mode — no files will be written)");
  }

  const results = await runWithConcurrency(paths, concurrency, (p) =>
    downloadOne(baseUrl.origin, p, dryRun),
  );

  // Tally outcomes and report each file's status.
  let ok = 0;
  let skipped = 0;
  let failed = 0;
  for (const r of results) {
    if (!r) continue;
    if ("error" in r) {
      failed++;
      console.warn(` ! ${r.item} → ${r.error}`);
      continue;
    }
    if (r.status.startsWith("ok")) ok++;
    else if (r.status.startsWith("skipped") || r.status === "would download") skipped++;
    else failed++;
    console.log(` ${r.status.padEnd(20)} ${r.url}`);
  }

  console.log(
    `\nDone. ${ok} downloaded, ${skipped} skipped/dry, ${failed} failed.\nMirrored bundles are runtime fixtures, not source. Edit components under src/.`,
  );
}
|
|
253
|
+
|
|
254
|
+
// Top-level kickoff: surface any unhandled error and fail the process so CI
// and npm scripts see a non-zero exit code.
main().catch((e) => {
  console.error(e);
  process.exit(1);
});
|
|
@@ -1,88 +1,88 @@
|
|
|
1
|
-
#!/usr/bin/env bash
#
# sync-agent-rules.sh — Generate AI agent config files from AGENTS.md
#
# AGENTS.md is the single source of truth. This script creates copies
# for agents that don't read AGENTS.md natively (Cline, Continue,
# Amazon Q, GitHub Copilot Chat).
#
# Usage:
#   bash scripts/sync-agent-rules.sh
#
# Agents that DON'T need generated files (they read AGENTS.md natively):
#   Codex CLI, OpenCode, Cursor, Windsurf, Copilot Coding Agent,
#   Roo Code, Aider, Augment Code
#
# Agents with their own thin pointer files (created manually):
#   Claude Code → CLAUDE.md (@AGENTS.md import)
#   Gemini CLI → GEMINI.md (@AGENTS.md import)
#   Cursor → .cursor/rules/project.mdc (pointer)
#   Windsurf → .windsurfrules (pointer)
#   Aider → .aider.conf.yml (read: [AGENTS.md])

# Fail fast on errors, unset variables, and pipeline failures.
set -euo pipefail

# Repo root is one level above this script's directory.
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
SOURCE="$REPO_ROOT/AGENTS.md"

if [[ ! -f "$SOURCE" ]]; then
  echo "Error: AGENTS.md not found at $SOURCE" >&2
  exit 1
fi

# Resolve @file imports (Claude Code syntax) into inline content.
# Lines like "@docs/research/INSPECTION_GUIDE.md" become the file's contents.
resolve_imports() {
  # `|| [[ -n "$line" ]]` also processes a final line without a trailing newline.
  while IFS= read -r line || [[ -n "$line" ]]; do
    # Strip a trailing carriage return in case the source has CRLF endings.
    line="${line%$'\r'}"
    if [[ "$line" =~ ^@(.+)$ ]]; then
      local import_path="${BASH_REMATCH[1]}"
      local resolved="$REPO_ROOT/$import_path"
      if [[ -f "$resolved" ]]; then
        cat "$resolved"
        echo ""
      else
        # Leave a visible marker in the generated output instead of failing.
        echo "<!-- Import not found: $import_path -->"
      fi
    else
      echo "$line"
    fi
  done < "$SOURCE"
}

RESOLVED_CONTENT="$(resolve_imports)"

HEADER="<!-- AUTO-GENERATED from AGENTS.md — do not edit directly.
Run \`bash scripts/sync-agent-rules.sh\` to regenerate. -->"

# Helper: write a generated file with header
write_file() {
  local target="$1"
  local content="$2"
  mkdir -p "$(dirname "$target")"
  printf '%s\n\n%s\n' "$HEADER" "$content" > "$target"
  echo " ✓ $target"
}

echo "Syncing agent rules from AGENTS.md..."

# GitHub Copilot Chat — .github/copilot-instructions.md
write_file "$REPO_ROOT/.github/copilot-instructions.md" "$RESOLVED_CONTENT"

# Cline / Roo Code — .clinerules
write_file "$REPO_ROOT/.clinerules" "$RESOLVED_CONTENT"

# Continue — .continue/rules/project.md
# Continue expects YAML frontmatter before the rule body.
CONTINUE_FRONTMATTER="---
description: Project conventions for AI Website Clone Template
alwaysApply: true
---"
write_file "$REPO_ROOT/.continue/rules/project.md" "$CONTINUE_FRONTMATTER
$RESOLVED_CONTENT"

# Amazon Q Developer — .amazonq/rules/project.md
write_file "$REPO_ROOT/.amazonq/rules/project.md" "$RESOLVED_CONTENT"

echo ""
echo "Done. Generated files are committed to the repo but sourced from AGENTS.md."
echo "Edit AGENTS.md, then re-run this script to update all agent configs."
|
|
1
|
+
#!/usr/bin/env bash
#
# sync-agent-rules.sh — Generate AI agent config files from AGENTS.md
#
# AGENTS.md is the single source of truth. This script creates copies
# for agents that don't read AGENTS.md natively (Cline, Continue,
# Amazon Q, GitHub Copilot Chat).
#
# Usage:
#   bash scripts/sync-agent-rules.sh
#
# Agents that DON'T need generated files (they read AGENTS.md natively):
#   Codex CLI, OpenCode, Cursor, Windsurf, Copilot Coding Agent,
#   Roo Code, Aider, Augment Code
#
# Agents with their own thin pointer files (created manually):
#   Claude Code → CLAUDE.md (@AGENTS.md import)
#   Gemini CLI → GEMINI.md (@AGENTS.md import)
#   Cursor → .cursor/rules/project.mdc (pointer)
#   Windsurf → .windsurfrules (pointer)
#   Aider → .aider.conf.yml (read: [AGENTS.md])

# Fail fast on errors, unset variables, and pipeline failures.
set -euo pipefail

# Repo root is one level above this script's directory.
REPO_ROOT="$(cd "$(dirname "$0")/.." && pwd)"
SOURCE="$REPO_ROOT/AGENTS.md"

if [[ ! -f "$SOURCE" ]]; then
  echo "Error: AGENTS.md not found at $SOURCE" >&2
  exit 1
fi

# Resolve @file imports (Claude Code syntax) into inline content.
# Lines like "@docs/research/INSPECTION_GUIDE.md" become the file's contents.
resolve_imports() {
  # `|| [[ -n "$line" ]]` also processes a final line without a trailing newline.
  while IFS= read -r line || [[ -n "$line" ]]; do
    # Strip a trailing carriage return in case the source has CRLF endings.
    line="${line%$'\r'}"
    if [[ "$line" =~ ^@(.+)$ ]]; then
      local import_path="${BASH_REMATCH[1]}"
      local resolved="$REPO_ROOT/$import_path"
      if [[ -f "$resolved" ]]; then
        cat "$resolved"
        echo ""
      else
        # Leave a visible marker in the generated output instead of failing.
        echo "<!-- Import not found: $import_path -->"
      fi
    else
      echo "$line"
    fi
  done < "$SOURCE"
}

RESOLVED_CONTENT="$(resolve_imports)"

HEADER="<!-- AUTO-GENERATED from AGENTS.md — do not edit directly.
Run \`bash scripts/sync-agent-rules.sh\` to regenerate. -->"

# Helper: write a generated file with header
write_file() {
  local target="$1"
  local content="$2"
  mkdir -p "$(dirname "$target")"
  printf '%s\n\n%s\n' "$HEADER" "$content" > "$target"
  echo " ✓ $target"
}

echo "Syncing agent rules from AGENTS.md..."

# GitHub Copilot Chat — .github/copilot-instructions.md
write_file "$REPO_ROOT/.github/copilot-instructions.md" "$RESOLVED_CONTENT"

# Cline / Roo Code — .clinerules
write_file "$REPO_ROOT/.clinerules" "$RESOLVED_CONTENT"

# Continue — .continue/rules/project.md
# Continue expects YAML frontmatter before the rule body.
CONTINUE_FRONTMATTER="---
description: Project conventions for AI Website Clone Template
alwaysApply: true
---"
write_file "$REPO_ROOT/.continue/rules/project.md" "$CONTINUE_FRONTMATTER
$RESOLVED_CONTENT"

# Amazon Q Developer — .amazonq/rules/project.md
write_file "$REPO_ROOT/.amazonq/rules/project.md" "$RESOLVED_CONTENT"

echo ""
echo "Done. Generated files are committed to the repo but sourced from AGENTS.md."
echo "Edit AGENTS.md, then re-run this script to update all agent configs."
|