opencodekit 0.18.2 → 0.18.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- package/dist/index.js +161 -38
- package/dist/template/.opencode/.version +1 -1
- package/dist/template/.opencode/AGENTS.md +39 -9
- package/dist/template/.opencode/AGENT_ALIGNMENT.md +6 -6
- package/dist/template/.opencode/agent/build.md +7 -103
- package/dist/template/.opencode/agent/general.md +0 -52
- package/dist/template/.opencode/agent/plan.md +10 -0
- package/dist/template/.opencode/agent/runner.md +79 -0
- package/dist/template/.opencode/command/create.md +13 -1
- package/dist/template/.opencode/command/init-context.md +20 -6
- package/dist/template/.opencode/command/init-user.md +18 -16
- package/dist/template/.opencode/command/lfg.md +3 -4
- package/dist/template/.opencode/command/ship.md +6 -48
- package/dist/template/.opencode/command/start.md +20 -3
- package/dist/template/.opencode/command/verify.md +12 -17
- package/dist/template/.opencode/context/README.md +29 -0
- package/dist/template/.opencode/memory/_templates/{STATE.md → state.md} +1 -1
- package/dist/template/.opencode/memory.db +0 -0
- package/dist/template/.opencode/memory.db-shm +0 -0
- package/dist/template/.opencode/memory.db-wal +0 -0
- package/dist/template/.opencode/opencode.json +131 -7
- package/dist/template/.opencode/package.json +1 -1
- package/dist/template/.opencode/plugin/lib/memory-helpers.ts +51 -16
- package/dist/template/.opencode/plugin/lib/memory-hooks.ts +1 -1
- package/dist/template/.opencode/skill/beads/references/TROUBLESHOOTING.md +25 -0
- package/dist/template/.opencode/skill/using-git-worktrees/SKILL.md +26 -0
- package/dist/template/.opencode/skill/verification-before-completion/references/VERIFICATION_PROTOCOL.md +67 -0
- package/package.json +1 -1
- /package/dist/template/.opencode/memory/_templates/{PROJECT.md → project.md} +0 -0
- /package/dist/template/.opencode/memory/_templates/{ROADMAP.md → roadmap.md} +0 -0
package/dist/index.js
CHANGED
|
@@ -2,7 +2,7 @@
|
|
|
2
2
|
import { createRequire } from "node:module";
|
|
3
3
|
import { cac } from "cac";
|
|
4
4
|
import { copyFileSync, existsSync, lstatSync, mkdirSync, readFileSync, readdirSync, renameSync, rmSync, unlinkSync, writeFileSync } from "node:fs";
|
|
5
|
-
import { basename, dirname, join } from "node:path";
|
|
5
|
+
import { basename, dirname, join, relative } from "node:path";
|
|
6
6
|
import * as p from "@clack/prompts";
|
|
7
7
|
import color from "picocolors";
|
|
8
8
|
import { z } from "zod";
|
|
@@ -18,7 +18,7 @@ var __require = /* @__PURE__ */ createRequire(import.meta.url);
|
|
|
18
18
|
|
|
19
19
|
//#endregion
|
|
20
20
|
//#region package.json
|
|
21
|
-
var version = "0.18.2";
|
|
21
|
+
var version = "0.18.3";
|
|
22
22
|
|
|
23
23
|
//#endregion
|
|
24
24
|
//#region src/utils/errors.ts
|
|
@@ -2409,6 +2409,83 @@ async function editAutoupdate(configPath) {
|
|
|
2409
2409
|
p.log.success(`Autoupdate set to ${color.cyan(selectedValue)}`);
|
|
2410
2410
|
}
|
|
2411
2411
|
|
|
2412
|
+
//#endregion
|
|
2413
|
+
//#region src/utils/manifest.ts
|
|
2414
|
+
const MANIFEST_FILE = ".template-manifest.json";
|
|
2415
|
+
/**
|
|
2416
|
+
* Compute SHA-256 hash of file content.
|
|
2417
|
+
*/
|
|
2418
|
+
function hashFile(filePath) {
|
|
2419
|
+
const content = readFileSync(filePath, "utf-8");
|
|
2420
|
+
return createHash("sha256").update(content).digest("hex");
|
|
2421
|
+
}
|
|
2422
|
+
/**
|
|
2423
|
+
* Scan a directory recursively and build a hash map of all files.
|
|
2424
|
+
* Returns record of relative paths to SHA-256 hashes.
|
|
2425
|
+
*/
|
|
2426
|
+
function buildManifestFromDir(dir, skipDirs = [
|
|
2427
|
+
"node_modules",
|
|
2428
|
+
".git",
|
|
2429
|
+
"dist",
|
|
2430
|
+
"coverage"
|
|
2431
|
+
]) {
|
|
2432
|
+
const files = {};
|
|
2433
|
+
function walk(currentDir) {
|
|
2434
|
+
for (const entry of readdirSync(currentDir, { withFileTypes: true })) if (entry.isDirectory()) {
|
|
2435
|
+
if (skipDirs.includes(entry.name)) continue;
|
|
2436
|
+
walk(join(currentDir, entry.name));
|
|
2437
|
+
} else if (entry.isFile()) {
|
|
2438
|
+
if (entry.name === MANIFEST_FILE) continue;
|
|
2439
|
+
const fullPath = join(currentDir, entry.name);
|
|
2440
|
+
const relPath = relative(dir, fullPath);
|
|
2441
|
+
files[relPath] = hashFile(fullPath);
|
|
2442
|
+
}
|
|
2443
|
+
}
|
|
2444
|
+
walk(dir);
|
|
2445
|
+
return files;
|
|
2446
|
+
}
|
|
2447
|
+
/**
|
|
2448
|
+
* Generate and save a template manifest after installation.
|
|
2449
|
+
*/
|
|
2450
|
+
function generateManifest(opencodeDir, version) {
|
|
2451
|
+
const manifest = {
|
|
2452
|
+
version,
|
|
2453
|
+
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2454
|
+
files: buildManifestFromDir(opencodeDir)
|
|
2455
|
+
};
|
|
2456
|
+
writeFileSync(join(opencodeDir, MANIFEST_FILE), JSON.stringify(manifest, null, 2));
|
|
2457
|
+
return manifest;
|
|
2458
|
+
}
|
|
2459
|
+
/**
|
|
2460
|
+
* Load existing manifest from .opencode/ directory.
|
|
2461
|
+
* Returns null if no manifest exists.
|
|
2462
|
+
*/
|
|
2463
|
+
function loadManifest(opencodeDir) {
|
|
2464
|
+
const manifestPath = join(opencodeDir, MANIFEST_FILE);
|
|
2465
|
+
if (!existsSync(manifestPath)) return null;
|
|
2466
|
+
try {
|
|
2467
|
+
return JSON.parse(readFileSync(manifestPath, "utf-8"));
|
|
2468
|
+
} catch {
|
|
2469
|
+
return null;
|
|
2470
|
+
}
|
|
2471
|
+
}
|
|
2472
|
+
/**
|
|
2473
|
+
* Determine if a file has been modified by the user.
|
|
2474
|
+
* Compares current file hash against the manifest's recorded hash.
|
|
2475
|
+
*
|
|
2476
|
+
* Returns:
|
|
2477
|
+
* - "unmodified" — file matches template, safe to update
|
|
2478
|
+
* - "modified" — user changed it, should preserve
|
|
2479
|
+
* - "unknown" — not in manifest (new user file or missing manifest)
|
|
2480
|
+
*/
|
|
2481
|
+
function fileModificationStatus(filePath, relativePath, manifest) {
|
|
2482
|
+
if (!manifest) return "unknown";
|
|
2483
|
+
const templateHash = manifest.files[relativePath];
|
|
2484
|
+
if (!templateHash) return "unknown";
|
|
2485
|
+
if (!existsSync(filePath)) return "unmodified";
|
|
2486
|
+
return hashFile(filePath) === templateHash ? "unmodified" : "modified";
|
|
2487
|
+
}
|
|
2488
|
+
|
|
2412
2489
|
//#endregion
|
|
2413
2490
|
//#region src/utils/patch.ts
|
|
2414
2491
|
/**
|
|
@@ -2467,7 +2544,7 @@ function savePatchMetadata(opencodeDir, metadata) {
|
|
|
2467
2544
|
/**
|
|
2468
2545
|
* Get the current OpenCodeKit version from package.json.
|
|
2469
2546
|
*/
|
|
2470
|
-
function getPackageVersion$1() {
|
|
2547
|
+
function getPackageVersion$2() {
|
|
2471
2548
|
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
2472
2549
|
const pkgPaths = [join(__dirname, "..", "..", "package.json"), join(__dirname, "..", "package.json")];
|
|
2473
2550
|
for (const pkgPath of pkgPaths) if (existsSync(pkgPath)) try {
|
|
@@ -2497,7 +2574,7 @@ function savePatch(opencodeDir, relativePath, templateContent, userContent) {
|
|
|
2497
2574
|
currentHash: calculateHash(userContent),
|
|
2498
2575
|
patchFile: patchFilename,
|
|
2499
2576
|
createdAt: (/* @__PURE__ */ new Date()).toISOString(),
|
|
2500
|
-
templateVersion: getPackageVersion$1()
|
|
2577
|
+
templateVersion: getPackageVersion$2()
|
|
2501
2578
|
};
|
|
2502
2579
|
metadata.patches[relativePath] = entry;
|
|
2503
2580
|
savePatchMetadata(opencodeDir, metadata);
|
|
@@ -2578,7 +2655,7 @@ const EXCLUDED_FILES = [
|
|
|
2578
2655
|
"yarn.lock",
|
|
2579
2656
|
"pnpm-lock.yaml"
|
|
2580
2657
|
];
|
|
2581
|
-
const PRESERVE_USER_DIRS = ["memory/project"];
|
|
2658
|
+
const PRESERVE_USER_DIRS = ["memory/project", "context"];
|
|
2582
2659
|
/**
|
|
2583
2660
|
* Get the global OpenCode config directory based on OS.
|
|
2584
2661
|
* - macOS/Linux: ~/.config/opencode/ (respects XDG_CONFIG_HOME)
|
|
@@ -2601,6 +2678,15 @@ function getTemplateRoot$1() {
|
|
|
2601
2678
|
for (const path of possiblePaths) if (existsSync(join(path, ".opencode"))) return path;
|
|
2602
2679
|
return null;
|
|
2603
2680
|
}
|
|
2681
|
+
function getPackageVersion$1() {
|
|
2682
|
+
const __dirname = dirname(fileURLToPath(import.meta.url));
|
|
2683
|
+
const pkgPaths = [join(__dirname, "..", "..", "package.json"), join(__dirname, "..", "package.json")];
|
|
2684
|
+
for (const pkgPath of pkgPaths) {
|
|
2685
|
+
if (!existsSync(pkgPath)) continue;
|
|
2686
|
+
return JSON.parse(readFileSync(pkgPath, "utf-8")).version;
|
|
2687
|
+
}
|
|
2688
|
+
return "unknown";
|
|
2689
|
+
}
|
|
2604
2690
|
async function copyDir(src, dest) {
|
|
2605
2691
|
const { mkdir, readdir } = await import("node:fs/promises");
|
|
2606
2692
|
await mkdir(dest, { recursive: true });
|
|
@@ -2729,7 +2815,7 @@ function getTemplateFiles(templateRoot) {
|
|
|
2729
2815
|
function findOrphans(targetDir, templateFiles) {
|
|
2730
2816
|
const opencodeDir = join(targetDir, ".opencode");
|
|
2731
2817
|
if (!existsSync(opencodeDir)) return [];
|
|
2732
|
-
return getAffectedFiles(opencodeDir).filter((f) => !templateFiles.has(f));
|
|
2818
|
+
return getAffectedFiles(opencodeDir).filter((f) => f !== MANIFEST_FILE && !templateFiles.has(f));
|
|
2733
2819
|
}
|
|
2734
2820
|
/**
|
|
2735
2821
|
* Check if an orphan file is a modified template file (exists in template but content differs).
|
|
@@ -2778,16 +2864,23 @@ async function autoSavePatchesForOrphans(targetDir, templateRoot, orphans) {
|
|
|
2778
2864
|
function preserveUserFiles(targetDir) {
|
|
2779
2865
|
const opencodeDir = join(targetDir, ".opencode");
|
|
2780
2866
|
const preserved = /* @__PURE__ */ new Map();
|
|
2781
|
-
|
|
2782
|
-
const
|
|
2783
|
-
|
|
2784
|
-
|
|
2867
|
+
function collectFiles(currentDir, relativeDir) {
|
|
2868
|
+
for (const entry of readdirSync(currentDir, { withFileTypes: true })) {
|
|
2869
|
+
const filePath = join(currentDir, entry.name);
|
|
2870
|
+
const relativePath = join(relativeDir, entry.name);
|
|
2871
|
+
if (entry.isDirectory()) {
|
|
2872
|
+
collectFiles(filePath, relativePath);
|
|
2873
|
+
continue;
|
|
2874
|
+
}
|
|
2785
2875
|
if (!entry.isFile()) continue;
|
|
2786
|
-
const filePath = join(dirPath, entry.name);
|
|
2787
|
-
const relativePath = join(relDir, entry.name);
|
|
2788
2876
|
preserved.set(relativePath, readFileSync(filePath, "utf-8"));
|
|
2789
2877
|
}
|
|
2790
2878
|
}
|
|
2879
|
+
for (const relDir of PRESERVE_USER_DIRS) {
|
|
2880
|
+
const dirPath = join(opencodeDir, relDir);
|
|
2881
|
+
if (!existsSync(dirPath)) continue;
|
|
2882
|
+
collectFiles(dirPath, relDir);
|
|
2883
|
+
}
|
|
2791
2884
|
return preserved;
|
|
2792
2885
|
}
|
|
2793
2886
|
/**
|
|
@@ -2801,6 +2894,12 @@ function restoreUserFiles(targetDir, preserved) {
|
|
|
2801
2894
|
writeFileSync(filePath, content);
|
|
2802
2895
|
}
|
|
2803
2896
|
}
|
|
2897
|
+
function finalizeInstalledFiles(targetDir, version, preservedFiles) {
|
|
2898
|
+
generateManifest(join(targetDir, ".opencode"), version);
|
|
2899
|
+
if (!preservedFiles || preservedFiles.size === 0) return 0;
|
|
2900
|
+
restoreUserFiles(targetDir, preservedFiles);
|
|
2901
|
+
return preservedFiles.size;
|
|
2902
|
+
}
|
|
2804
2903
|
async function initCommand(rawOptions = {}) {
|
|
2805
2904
|
const options = parseOptions(InitOptionsSchema, rawOptions);
|
|
2806
2905
|
if (process.argv.includes("--quiet")) return;
|
|
@@ -2911,10 +3010,8 @@ async function initCommand(rawOptions = {}) {
|
|
|
2911
3010
|
process.exit(1);
|
|
2912
3011
|
}
|
|
2913
3012
|
s.stop("Done");
|
|
2914
|
-
|
|
2915
|
-
|
|
2916
|
-
p.log.info(`Preserved ${preservedFiles.size} user memory files (memory/project/)`);
|
|
2917
|
-
}
|
|
3013
|
+
const restoredFileCount = finalizeInstalledFiles(targetDir, getPackageVersion$1(), preservedFiles);
|
|
3014
|
+
if (restoredFileCount > 0) p.log.info(`Preserved ${restoredFileCount} user memory files (memory/project/)`);
|
|
2918
3015
|
if (options.free) {
|
|
2919
3016
|
applyModelPreset(targetDir, "free");
|
|
2920
3017
|
p.log.info("Applied free model preset");
|
|
@@ -3352,6 +3449,7 @@ const PRESERVE_FILES = ["opencode.json", ".env"];
|
|
|
3352
3449
|
const PRESERVE_DIRS = [
|
|
3353
3450
|
"agent",
|
|
3354
3451
|
"command",
|
|
3452
|
+
"context",
|
|
3355
3453
|
"memory",
|
|
3356
3454
|
"skill",
|
|
3357
3455
|
"tool"
|
|
@@ -3388,7 +3486,8 @@ async function checkVersion(opencodeDir) {
|
|
|
3388
3486
|
needsUpdate: current !== latest
|
|
3389
3487
|
};
|
|
3390
3488
|
}
|
|
3391
|
-
function copyDirWithPreserve(src, dest, preserveFiles, preserveDirs) {
|
|
3489
|
+
function copyDirWithPreserve(src, dest, preserveFiles, preserveDirs, manifest, basePath = "") {
|
|
3490
|
+
const added = [];
|
|
3392
3491
|
const updated = [];
|
|
3393
3492
|
const preserved = [];
|
|
3394
3493
|
if (!existsSync(dest)) mkdirSync(dest, { recursive: true });
|
|
@@ -3399,25 +3498,34 @@ function copyDirWithPreserve(src, dest, preserveFiles, preserveDirs) {
|
|
|
3399
3498
|
const destPath = join(dest, entry.name);
|
|
3400
3499
|
if (entry.isDirectory()) if (preserveDirs.includes(entry.name)) {
|
|
3401
3500
|
if (!existsSync(destPath)) mkdirSync(destPath, { recursive: true });
|
|
3402
|
-
const subResult = copyDirPreserveExisting(srcPath, destPath);
|
|
3501
|
+
const subResult = copyDirPreserveExisting(srcPath, destPath, manifest, entry.name);
|
|
3502
|
+
added.push(...subResult.added);
|
|
3403
3503
|
updated.push(...subResult.updated);
|
|
3404
3504
|
preserved.push(...subResult.preserved);
|
|
3405
3505
|
} else {
|
|
3406
|
-
const subResult = copyDirWithPreserve(srcPath, destPath, [], []);
|
|
3506
|
+
const subResult = copyDirWithPreserve(srcPath, destPath, [], [], manifest, join(basePath, entry.name));
|
|
3507
|
+
added.push(...subResult.added);
|
|
3407
3508
|
updated.push(...subResult.updated);
|
|
3509
|
+
preserved.push(...subResult.preserved);
|
|
3408
3510
|
}
|
|
3409
|
-
else if (preserveFiles.includes(entry.name) && existsSync(destPath)) preserved.push(entry.name);
|
|
3410
3511
|
else {
|
|
3411
|
-
|
|
3412
|
-
|
|
3512
|
+
const relativePath = join(basePath, entry.name);
|
|
3513
|
+
if (preserveFiles.includes(entry.name) && existsSync(destPath)) preserved.push(relativePath);
|
|
3514
|
+
else {
|
|
3515
|
+
if (!existsSync(destPath)) added.push(relativePath);
|
|
3516
|
+
else updated.push(relativePath);
|
|
3517
|
+
copyFileSync(srcPath, destPath);
|
|
3518
|
+
}
|
|
3413
3519
|
}
|
|
3414
3520
|
}
|
|
3415
3521
|
return {
|
|
3522
|
+
added,
|
|
3416
3523
|
updated,
|
|
3417
3524
|
preserved
|
|
3418
3525
|
};
|
|
3419
3526
|
}
|
|
3420
|
-
function copyDirPreserveExisting(src, dest) {
|
|
3527
|
+
function copyDirPreserveExisting(src, dest, manifest, basePath = "") {
|
|
3528
|
+
const added = [];
|
|
3421
3529
|
const updated = [];
|
|
3422
3530
|
const preserved = [];
|
|
3423
3531
|
const entries = readdirSync(src, { withFileTypes: true });
|
|
@@ -3426,15 +3534,23 @@ function copyDirPreserveExisting(src, dest) {
|
|
|
3426
3534
|
const destPath = join(dest, entry.name);
|
|
3427
3535
|
if (entry.isDirectory()) {
|
|
3428
3536
|
if (!existsSync(destPath)) mkdirSync(destPath, { recursive: true });
|
|
3429
|
-
const subResult = copyDirPreserveExisting(srcPath, destPath);
|
|
3537
|
+
const subResult = copyDirPreserveExisting(srcPath, destPath, manifest, join(basePath, entry.name));
|
|
3538
|
+
added.push(...subResult.added);
|
|
3430
3539
|
updated.push(...subResult.updated);
|
|
3431
3540
|
preserved.push(...subResult.preserved);
|
|
3432
|
-
} else
|
|
3433
|
-
|
|
3434
|
-
|
|
3435
|
-
|
|
3541
|
+
} else {
|
|
3542
|
+
const relativePath = join(basePath, entry.name);
|
|
3543
|
+
if (!existsSync(destPath)) {
|
|
3544
|
+
copyFileSync(srcPath, destPath);
|
|
3545
|
+
added.push(relativePath);
|
|
3546
|
+
} else if (fileModificationStatus(destPath, relativePath, manifest) === "unmodified") {
|
|
3547
|
+
copyFileSync(srcPath, destPath);
|
|
3548
|
+
updated.push(relativePath);
|
|
3549
|
+
} else preserved.push(relativePath);
|
|
3550
|
+
}
|
|
3436
3551
|
}
|
|
3437
3552
|
return {
|
|
3553
|
+
added,
|
|
3438
3554
|
updated,
|
|
3439
3555
|
preserved
|
|
3440
3556
|
};
|
|
@@ -3451,10 +3567,20 @@ function getAllFiles(dir, base = "") {
|
|
|
3451
3567
|
}
|
|
3452
3568
|
return files;
|
|
3453
3569
|
}
|
|
3570
|
+
function findUpgradeOrphans(installedFiles, templateFiles) {
|
|
3571
|
+
return installedFiles.filter((file) => {
|
|
3572
|
+
if (file === ".version") return false;
|
|
3573
|
+
if (file === MANIFEST_FILE) return false;
|
|
3574
|
+
if (PRESERVE_FILES.includes(file)) return false;
|
|
3575
|
+
if (file === ".env" || file.endsWith(".env")) return false;
|
|
3576
|
+
return !templateFiles.includes(file);
|
|
3577
|
+
});
|
|
3578
|
+
}
|
|
3454
3579
|
async function upgradeCommand(rawOptions = {}) {
|
|
3455
3580
|
const options = parseOptions(UpgradeOptionsSchema, rawOptions);
|
|
3456
3581
|
if (process.argv.includes("--quiet")) return;
|
|
3457
3582
|
const opencodeDir = join(process.cwd(), ".opencode");
|
|
3583
|
+
const manifest = loadManifest(opencodeDir);
|
|
3458
3584
|
if (!existsSync(opencodeDir)) {
|
|
3459
3585
|
notInitialized();
|
|
3460
3586
|
return;
|
|
@@ -3495,8 +3621,11 @@ async function upgradeCommand(rawOptions = {}) {
|
|
|
3495
3621
|
}
|
|
3496
3622
|
const s = p.spinner();
|
|
3497
3623
|
s.start("Upgrading");
|
|
3498
|
-
const result = copyDirWithPreserve(templateOpencode, opencodeDir, PRESERVE_FILES, PRESERVE_DIRS);
|
|
3499
|
-
if (versionInfo.latest) writeFileSync(join(opencodeDir, ".version"), versionInfo.latest);
|
|
3624
|
+
const result = copyDirWithPreserve(templateOpencode, opencodeDir, PRESERVE_FILES, PRESERVE_DIRS, manifest);
|
|
3625
|
+
if (versionInfo.latest) {
|
|
3626
|
+
writeFileSync(join(opencodeDir, ".version"), versionInfo.latest);
|
|
3627
|
+
generateManifest(opencodeDir, versionInfo.latest);
|
|
3628
|
+
}
|
|
3500
3629
|
s.stop("Done");
|
|
3501
3630
|
const patchMetadata = loadPatchMetadata(opencodeDir);
|
|
3502
3631
|
const patchCount = Object.keys(patchMetadata.patches).length;
|
|
@@ -3516,16 +3645,10 @@ async function upgradeCommand(rawOptions = {}) {
|
|
|
3516
3645
|
if (patchResults.conflicts > 0) p.log.warn(`${patchResults.conflicts} patch conflicts (see .opencode/patches/*.rej files)`);
|
|
3517
3646
|
}
|
|
3518
3647
|
if (result.updated.length > 0) p.log.success(`Updated ${result.updated.length} files`);
|
|
3648
|
+
if (result.added.length > 0) p.log.success(`Added ${result.added.length} files`);
|
|
3519
3649
|
if (result.preserved.length > 0) p.log.info(`Preserved ${result.preserved.length} user files`);
|
|
3520
3650
|
if (patchResults.success > 0) p.log.success(`Reapplied ${patchResults.success} patches`);
|
|
3521
|
-
const installedFiles = getAllFiles(opencodeDir);
|
|
3522
|
-
const templateFiles = getAllFiles(templateOpencode);
|
|
3523
|
-
const orphans = installedFiles.filter((file) => {
|
|
3524
|
-
if (file === ".version") return false;
|
|
3525
|
-
if (PRESERVE_FILES.includes(file)) return false;
|
|
3526
|
-
if (file === ".env" || file.endsWith(".env")) return false;
|
|
3527
|
-
return !templateFiles.includes(file);
|
|
3528
|
-
});
|
|
3651
|
+
const orphans = findUpgradeOrphans(getAllFiles(opencodeDir), getAllFiles(templateOpencode));
|
|
3529
3652
|
if (orphans.length > 0) {
|
|
3530
3653
|
p.log.info(`${color.yellow(orphans.length.toString())} files found that are not in the template (orphans).`);
|
|
3531
3654
|
if (options.pruneAll) {
|
|
@@ -1 +1 @@
|
|
|
1
|
-
0.18.2
|
|
1
|
+
0.18.3
|
|
@@ -21,13 +21,15 @@ Your loop: **perceive → create → verify → ship.**
|
|
|
21
21
|
When instructions conflict:
|
|
22
22
|
|
|
23
23
|
1. **Security** — never expose or invent credentials
|
|
24
|
-
2. **Anti-hallucination** — verify before asserting
|
|
24
|
+
2. **Anti-hallucination** — verify before asserting; if context is missing, prefer lookup over guessing; if you must proceed without full context, label assumptions explicitly and choose a reversible action
|
|
25
25
|
3. **User intent** — do what was asked, simply and directly
|
|
26
26
|
4. **Agency preservation** — "likely difficult" ≠ "impossible" ≠ "don't try"
|
|
27
27
|
5. This `AGENTS.md`
|
|
28
28
|
6. Memory (`memory-search`)
|
|
29
29
|
7. Project files and codebase evidence
|
|
30
30
|
|
|
31
|
+
If a newer user instruction conflicts with an earlier one, follow the newer instruction. Preserve earlier instructions that don't conflict.
|
|
32
|
+
|
|
31
33
|
---
|
|
32
34
|
|
|
33
35
|
## Operating Principles
|
|
@@ -70,18 +72,46 @@ When instructions conflict:
|
|
|
70
72
|
| `build.gradle` | `gradle compileJava` | `gradle checkstyleMain` | `gradle test` |
|
|
71
73
|
| `Makefile` | Check for `check`/`lint`/`test` targets | | |
|
|
72
74
|
|
|
75
|
+
### Tool Persistence
|
|
76
|
+
|
|
77
|
+
- Use tools whenever they materially improve correctness or completeness
|
|
78
|
+
- Don't stop early when another tool call would improve the result
|
|
79
|
+
- Keep calling tools until the task is complete **and** verification passes
|
|
80
|
+
- If a tool returns empty or partial results, retry with a different strategy before giving up (see Empty Result Recovery)
|
|
81
|
+
|
|
82
|
+
### Dependency Checks
|
|
83
|
+
|
|
84
|
+
- Before taking an action, check whether prerequisite discovery, lookup, or memory retrieval steps are required
|
|
85
|
+
- Don't skip prerequisite steps because the final action seems obvious
|
|
86
|
+
- If a task depends on the output of a prior step, resolve that dependency first
|
|
87
|
+
|
|
88
|
+
### Empty Result Recovery
|
|
89
|
+
|
|
90
|
+
If a lookup, search, or tool call returns empty, partial, or suspiciously narrow results:
|
|
91
|
+
|
|
92
|
+
1. Don't immediately conclude that no results exist
|
|
93
|
+
2. Try at least 1-2 fallback strategies (alternative query terms, broader filters, different source/tool)
|
|
94
|
+
3. Only then report "no results found" along with what strategies were attempted
|
|
95
|
+
|
|
96
|
+
### Completeness Tracking
|
|
97
|
+
|
|
98
|
+
- Treat a task as incomplete until all requested items are covered or explicitly marked `[blocked]`
|
|
99
|
+
- Maintain an internal checklist of deliverables (use TodoWrite for multi-step work)
|
|
100
|
+
- For lists, batches, or paginated results: determine expected scope, track processed items, confirm full coverage
|
|
101
|
+
- If any item is blocked by missing data, mark it `[blocked]` and state exactly what is missing
|
|
102
|
+
|
|
73
103
|
---
|
|
74
104
|
|
|
75
105
|
## Hard Constraints (Never Violate)
|
|
76
106
|
|
|
77
|
-
| Constraint | Rule
|
|
78
|
-
| ------------- |
|
|
79
|
-
| Security | Never expose/invent credentials
|
|
80
|
-
| Git Safety | Never force push main/master; never bypass hooks
|
|
81
|
-
| Git Restore | Never run `reset --hard`, `checkout .`, `clean -fd` without explicit user request
|
|
82
|
-
| Honesty | Never fabricate tool output; never guess URLs
|
|
83
|
-
| Paths | Use absolute paths for file operations
|
|
84
|
-
| Reversibility | Ask first before destructive/irreversible actions
|
|
107
|
+
| Constraint | Rule |
|
|
108
|
+
| ------------- | --------------------------------------------------------------------------------------------------------------------------------- |
|
|
109
|
+
| Security | Never expose/invent credentials |
|
|
110
|
+
| Git Safety | Never force push main/master; never bypass hooks |
|
|
111
|
+
| Git Restore | Never run `reset --hard`, `checkout .`, `clean -fd` without explicit user request |
|
|
112
|
+
| Honesty | Never fabricate tool output; never guess URLs; label inferences as inferences; if sources conflict, state the conflict explicitly |
|
|
113
|
+
| Paths | Use absolute paths for file operations |
|
|
114
|
+
| Reversibility | Ask first before destructive/irreversible actions |
|
|
85
115
|
|
|
86
116
|
---
|
|
87
117
|
|
|
@@ -17,7 +17,7 @@ GSD has **11 specialized agents** (~12,000 total lines) vs OpenCodeKit's **8 gen
|
|
|
17
17
|
| `gsd-phase-researcher` | ~300 | Domain research for specific phases | `scout` (91 lines) | Similar scope, GSD has more structured output |
|
|
18
18
|
| `gsd-research-synthesizer` | ~200 | Aggregates parallel research findings | _No equivalent_ | **Missing**: Multi-agent research aggregation |
|
|
19
19
|
| `gsd-plan-checker` | ~400 | Validates plans against requirements before execution | _No equivalent_ | **Missing**: Pre-execution plan validation |
|
|
20
|
-
| `gsd-roadmapper` | ~300 | Creates ROADMAP.md with phase structure | _No equivalent_ | **Missing**: Dedicated roadmap creation |
|
|
20
|
+
| `gsd-roadmapper` | ~300 | Creates roadmap.md with phase structure | _No equivalent_ | **Missing**: Dedicated roadmap creation |
|
|
21
21
|
| `gsd-codebase-mapper` | ~500 | Analyzes existing codebase architecture | `explore` (65 lines) | Deeper analysis: stack, conventions, concerns, architecture |
|
|
22
22
|
| `gsd-project-researcher` | ~250 | Project-level ecosystem research | `scout` | Overlaps with phase-researcher |
|
|
23
23
|
| `gsd-integration-checker` | ~200 | Validates integrations work correctly | _No equivalent_ | **Missing**: Integration validation agent |
|
|
@@ -401,14 +401,14 @@ Recommendation: Add as skill or plan agent mode
|
|
|
401
401
|
#### gsd-roadmapper (~300 lines)
|
|
402
402
|
|
|
403
403
|
```markdown
|
|
404
|
-
Creates ROADMAP.md with phase structure.
|
|
404
|
+
Creates roadmap.md with phase structure.
|
|
405
405
|
|
|
406
406
|
Outputs:
|
|
407
407
|
|
|
408
|
-
- PROJECT.md (vision)
|
|
408
|
+
- project.md (vision)
|
|
409
409
|
- REQUIREMENTS.md (scoped v1/v2)
|
|
410
|
-
- ROADMAP.md (phases with goals)
|
|
411
|
-
- STATE.md (memory)
|
|
410
|
+
- roadmap.md (phases with goals)
|
|
411
|
+
- state.md (memory)
|
|
412
412
|
|
|
413
413
|
Recommendation: Add `/roadmap` command
|
|
414
414
|
```
|
|
@@ -510,7 +510,7 @@ research-lead (NEW - optional):
|
|
|
510
510
|
|
|
511
511
|
/roadmap:
|
|
512
512
|
description: Create project roadmap
|
|
513
|
-
creates: PROJECT.md, REQUIREMENTS.md, ROADMAP.md, STATE.md
|
|
513
|
+
creates: project.md, REQUIREMENTS.md, roadmap.md, state.md
|
|
514
514
|
ported_from: gsd:new-project (planning phase only)
|
|
515
515
|
|
|
516
516
|
/verify:
|
|
@@ -82,109 +82,6 @@ Implement requested work, verify with fresh evidence, and coordinate subagents o
|
|
|
82
82
|
- Re-run typecheck/lint/tests after meaningful edits
|
|
83
83
|
- If verification fails twice on the same approach, **escalate with learnings**, not frustration
|
|
84
84
|
|
|
85
|
-
## Memory Ritual
|
|
86
|
-
|
|
87
|
-
Memory makes knowledge persistent. Follow this ritual every session:
|
|
88
|
-
|
|
89
|
-
### Ground Phase — Load Context
|
|
90
|
-
|
|
91
|
-
```typescript
|
|
92
|
-
// 1. Search for relevant past work
|
|
93
|
-
memory_search({ query: "<task keywords>", limit: 5 });
|
|
94
|
-
|
|
95
|
-
// 2. Check recent handoffs
|
|
96
|
-
memory_read({ file: "handoffs/last" });
|
|
97
|
-
```
|
|
98
|
-
|
|
99
|
-
### Transform Phase — Record Discoveries
|
|
100
|
-
|
|
101
|
-
```typescript
|
|
102
|
-
observation({
|
|
103
|
-
type: "pattern",
|
|
104
|
-
title: "Brief description",
|
|
105
|
-
narrative: "Context and reasoning...",
|
|
106
|
-
facts: "key, facts, here",
|
|
107
|
-
concepts: "searchable, keywords",
|
|
108
|
-
files_modified: "src/file.ts",
|
|
109
|
-
});
|
|
110
|
-
```
|
|
111
|
-
|
|
112
|
-
## Rules
|
|
113
|
-
|
|
114
|
-
- Be concise, direct, and evidence-based
|
|
115
|
-
- Never claim success without fresh verification output
|
|
116
|
-
- Ask before irreversible actions (close bead, commit, push, force operations)
|
|
117
|
-
- Never bypass hooks or safety checks
|
|
118
|
-
- Never fabricate tool output
|
|
119
|
-
- Never use secrets not explicitly provided
|
|
120
|
-
|
|
121
|
-
## Skills
|
|
122
|
-
|
|
123
|
-
Always load:
|
|
124
|
-
|
|
125
|
-
- beads (task tracking)
|
|
126
|
-
- verification-before-completion
|
|
127
|
-
|
|
128
|
-
Load contextually when needed:
|
|
129
|
-
|
|
130
|
-
| Work Type | Skills |
|
|
131
|
-
| ---------------------- | ---------------------------------------------- |
|
|
132
|
-
| Planning artifacts | prd-task, executing-plans, writing-plans, prd |
|
|
133
|
-
| Debug/bug work | systematic-debugging, root-cause-tracing |
|
|
134
|
-
| Test-heavy work | test-driven-development, testing-anti-patterns |
|
|
135
|
-
| UI work | frontend-design, react-best-practices |
|
|
136
|
-
| Parallel orchestration | swarm-coordination, beads-bridge |
|
|
137
|
-
|
|
138
|
-
## Execution Mode
|
|
139
|
-
|
|
140
|
-
- **Sequential** by default for coupled work
|
|
141
|
-
- **Parallel** for 3+ independent, file-disjoint tasks using `task(...)`
|
|
142
|
-
|
|
143
|
-
## Deviation Rules (Auto-Fix Without Permission)
|
|
144
|
-
|
|
145
|
-
While executing, you WILL discover work not in the plan. Apply these rules automatically:
|
|
146
|
-
|
|
147
|
-
**RULE 1: Auto-fix bugs** (broken behavior, errors, logic issues)
|
|
148
|
-
|
|
149
|
-
- Wrong queries, type errors, null pointer exceptions
|
|
150
|
-
- Fix inline → verify → continue task
|
|
151
|
-
|
|
152
|
-
**RULE 2: Auto-add missing critical functionality** (validation, auth, error handling)
|
|
153
|
-
|
|
154
|
-
- Missing input validation, no auth on protected routes
|
|
155
|
-
- No error handling, missing null checks
|
|
156
|
-
|
|
157
|
-
**RULE 3: Auto-fix blocking issues** (missing deps, wrong types, broken imports)
|
|
158
|
-
|
|
159
|
-
- Missing dependency, wrong types, broken imports
|
|
160
|
-
|
|
161
|
-
**RULE 4: ASK about architectural changes** (new tables, library switches, major refactors)
|
|
162
|
-
|
|
163
|
-
- STOP → report to user with: what found, proposed change, impact
|
|
164
|
-
|
|
165
|
-
## Output
|
|
166
|
-
|
|
167
|
-
Report in this order:
|
|
168
|
-
|
|
169
|
-
1. **Task results** (done/pending/blockers)
|
|
170
|
-
2. **Verification command results** (fresh evidence)
|
|
171
|
-
3. **Review findings** (if review run)
|
|
172
|
-
4. **Next recommended command** (`/plan`, `/ship`, `/pr`, etc.)
|
|
173
|
-
5. **Reset checkpoint** — what was learned, what remains
|
|
174
|
-
|
|
175
|
-
> _"No cathedral. No country. Just pulse."_
|
|
176
|
-
> Build. Verify. Ship. Repeat.
|
|
177
|
-
|
|
178
|
-
- Read files before editing
|
|
179
|
-
- Delegate when work is large, uncertain, or cross-domain
|
|
180
|
-
|
|
181
|
-
### Verification as Calibration
|
|
182
|
-
|
|
183
|
-
- No success claims without fresh verification output
|
|
184
|
-
- Verification failures are **signals, not condemnations** — adjust and proceed
|
|
185
|
-
- Re-run typecheck/lint/tests after meaningful edits
|
|
186
|
-
- If verification fails twice on the same approach, **escalate with learnings**, not frustration
|
|
187
|
-
|
|
188
85
|
## Ritual Structure
|
|
189
86
|
|
|
190
87
|
Each task follows a five-phase ritual. Constraints create the container; the ritual transforms intent into output.
|
|
@@ -197,6 +94,12 @@ Each task follows a five-phase ritual. Constraints create the container; the rit
|
|
|
197
94
|
| **Release** | Output results and evidence | Report changes, show verification output, cite file:line refs | Brief pause to ensure completeness |
|
|
198
95
|
| **Reset** | Checkpoint and prepare for next | Update memory if needed, confirm bead state, plan next iteration | Silent assessment: "What did I learn?" |
|
|
199
96
|
|
|
97
|
+
Ground phase worktree check:
|
|
98
|
+
|
|
99
|
+
- Check for active worktree: `cat .beads/artifacts/$BEAD_ID/worktree.txt 2>/dev/null`
|
|
100
|
+
- If worktree exists, verify it's valid: `git worktree list | grep "$WORKTREE_PATH"`
|
|
101
|
+
- If valid, operate from worktree directory
|
|
102
|
+
|
|
200
103
|
## Memory Ritual
|
|
201
104
|
|
|
202
105
|
Memory makes knowledge persistent. Follow this ritual every session:
|
|
@@ -430,6 +333,7 @@ When constraints tighten:
|
|
|
430
333
|
|
|
431
334
|
- For long tasks, send brief updates at major milestones
|
|
432
335
|
- Keep each update to one short sentence
|
|
336
|
+
- Never open with filler ("Got it", "Sure", "Great question") — start with what you're doing or what you found
|
|
433
337
|
- Updates are **breath points** — brief, then back to work
|
|
434
338
|
|
|
435
339
|
## Delegation
|
|
@@ -27,58 +27,6 @@ You are a general implementation subagent. You output minimal in-scope changes p
|
|
|
27
27
|
|
|
28
28
|
Execute clear, low-complexity coding tasks quickly (typically 1-3 files) and report concrete results.
|
|
29
29
|
|
|
30
|
-
## Principles
|
|
31
|
-
|
|
32
|
-
### Default to Action
|
|
33
|
-
|
|
34
|
-
- If scope is clear, execute immediately
|
|
35
|
-
- Don't wait for permission on reversible changes
|
|
36
|
-
|
|
37
|
-
### Scope Discipline
|
|
38
|
-
|
|
39
|
-
- If scope grows beyond 3 files or requires architecture decisions, **delegate**
|
|
40
|
-
- When requirements are underspecified, choose the safest reasonable default
|
|
41
|
-
|
|
42
|
-
### Verification
|
|
43
|
-
|
|
44
|
-
- Verify with relevant checks before claiming done
|
|
45
|
-
- Never revert or discard user changes you did not create
|
|
46
|
-
|
|
47
|
-
## Rules
|
|
48
|
-
|
|
49
|
-
- Read code before editing
|
|
50
|
-
- Keep changes minimal and in-scope
|
|
51
|
-
- Ask before irreversible actions (commit, push, destructive ops)
|
|
52
|
-
|
|
53
|
-
## Workflow
|
|
54
|
-
|
|
55
|
-
1. Read relevant files
|
|
56
|
-
2. Confirm scope is small and clear
|
|
57
|
-
3. Make surgical edits
|
|
58
|
-
4. Run validation (lint/typecheck/tests as applicable)
|
|
59
|
-
5. Report changed files with `file:line` references
|
|
60
|
-
|
|
61
|
-
## Output
|
|
62
|
-
|
|
63
|
-
- What changed
|
|
64
|
-
- Validation evidence
|
|
65
|
-
- Assumptions/defaults chosen (if any)
|
|
66
|
-
- Remaining risks/blockers (if any)
|
|
67
|
-
|
|
68
|
-
# General Agent
|
|
69
|
-
|
|
70
|
-
**Purpose**: Surgical implementer — small scope, fast execution, concrete results.
|
|
71
|
-
|
|
72
|
-
> _"If the lever is small, pull it quickly. If the lever is large, escalate."_
|
|
73
|
-
|
|
74
|
-
## Identity
|
|
75
|
-
|
|
76
|
-
You are a general implementation subagent. You output minimal in-scope changes plus validation evidence only.
|
|
77
|
-
|
|
78
|
-
## Task
|
|
79
|
-
|
|
80
|
-
Execute clear, low-complexity coding tasks quickly (typically 1-3 files) and report concrete results.
|
|
81
|
-
|
|
82
30
|
## Personality
|
|
83
31
|
|
|
84
32
|
- Concise, direct, and friendly
|
|
@@ -190,6 +190,16 @@ must_haves:
|
|
|
190
190
|
- Level 2+: New library not in package.json, external API, "choose/select/evaluate" in description
|
|
191
191
|
- Level 3: "architecture/design/system", multiple external services, data modeling, auth design
|
|
192
192
|
|
|
193
|
+
### Research Execution (Level 2+)
|
|
194
|
+
|
|
195
|
+
For any research at Level 2 or above, follow the 3-pass pattern:
|
|
196
|
+
|
|
197
|
+
1. **Plan**: List 3-6 sub-questions the research must answer
|
|
198
|
+
2. **Retrieve**: Search each sub-question; follow 1-2 second-order leads per question
|
|
199
|
+
3. **Synthesize**: Resolve contradictions between sources, write findings with citations
|
|
200
|
+
|
|
201
|
+
Stop only when further searching is unlikely to change the conclusion.
|
|
202
|
+
|
|
193
203
|
## Context Budget Rules
|
|
194
204
|
|
|
195
205
|
**Quality Degradation Curve:**
|